Mirror of https://github.com/k3s-io/kubernetes.git (synced 2025-08-03 17:30:00 +00:00)

Commit 91527c29cf: Merge pull request #17382 from caesarxuchao/rewrite-linkchecker

Auto commit by PR queue bot
Godeps/Godeps.json (generated, 5 lines changed)

@@ -762,6 +762,11 @@
 			"ImportPath": "github.com/mitchellh/mapstructure",
 			"Rev": "740c764bc6149d3f1806231418adb9f52c11bcbf"
 		},
+		{
+			"ImportPath": "github.com/mvdan/xurls",
+			"Comment": "v0.8.0-14-g1b768d7",
+			"Rev": "1b768d7c393abd8e8dda1458385a57becd4b2d4e"
+		},
 		{
 			"ImportPath": "github.com/mxk/go-flowrate/flowrate",
 			"Rev": "cca7078d478f8520f85629ad7c68962d31ed7682"
Godeps/LICENSES.md (generated, 1 line changed)

@@ -61,6 +61,7 @@ github.com/matttproud/golang_protobuf_extensions | Apache-2
 github.com/mesos/mesos-go | Apache-2
 github.com/miekg/dns | spdxBSD3
 github.com/mitchellh/mapstructure | MITname
+github.com/mvdan/xurls | spdxBSD3
 github.com/mxk/go-flowrate | spdxBSD3
 github.com/onsi/ginkgo | spdxMIT
 github.com/onsi/gomega | spdxMIT
Godeps/_workspace/src/github.com/mvdan/xurls/.gitignore (generated, vendored, new file, 3 lines)

@@ -0,0 +1,3 @@
+cmd/xurls/xurls
+generate/tldsgen/tldsgen
+generate/regexgen/regexgen
Godeps/_workspace/src/github.com/mvdan/xurls/.travis.yml (generated, vendored, new file, 5 lines)

@@ -0,0 +1,5 @@
+language: go
+
+go:
+  - 1.4.3
+  - 1.5.1
Godeps/_workspace/src/github.com/mvdan/xurls/LICENSE (generated, vendored, new file, 27 lines)

@@ -0,0 +1,27 @@
+Copyright (c) 2015, Daniel Martí. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+   * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+   * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Godeps/_workspace/src/github.com/mvdan/xurls/README.md (generated, vendored, new file, 31 lines; the two badge images were stripped by this rendering, so only link labels remain)

@@ -0,0 +1,31 @@
+# xurls
+
+[GoDoc](https://godoc.org/github.com/mvdan/xurls) [Travis CI](https://travis-ci.org/mvdan/xurls)
+
+Extract urls from text using regular expressions.
+
+	go get github.com/mvdan/xurls
+
+```go
+import "github.com/mvdan/xurls"
+
+func main() {
+	xurls.Relaxed.FindString("Do gophers live in golang.org?")
+	// "golang.org"
+	xurls.Relaxed.FindAllString("foo.com is http://foo.com/.", -1)
+	// []string{"foo.com", "http://foo.com/"}
+	xurls.Strict.FindAllString("foo.com is http://foo.com/.", -1)
+	// []string{"http://foo.com/"}
+}
+```
+
+#### cmd/xurls
+
+Reads text and prints one url per line.
+
+	go get github.com/mvdan/xurls/cmd/xurls
+
+```shell
+$ echo "Do gophers live in http://golang.org?" | xurls
+http://golang.org
+```
Godeps/_workspace/src/github.com/mvdan/xurls/cmd/xurls/main.go (generated, vendored, new file, 83 lines)

@@ -0,0 +1,83 @@
+// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
+// See LICENSE for licensing information
+
+package main
+
+import (
+	"bufio"
+	"flag"
+	"fmt"
+	"os"
+	"regexp"
+
+	"github.com/mvdan/xurls"
+)
+
+var (
+	matching = flag.String("m", "", "")
+	relaxed  = flag.Bool("r", false, "")
+)
+
+func init() {
+	flag.Usage = func() {
+		p := func(format string, a ...interface{}) {
+			fmt.Fprintf(os.Stderr, format, a...)
+		}
+		p("Usage: xurls [-h] [files]\n\n")
+		p("If no files are given, it reads from standard input.\n\n")
+		p("   -m <regexp>   only match urls whose scheme matches a regexp\n")
+		p("                    example: 'https?://|mailto:'\n")
+		p("   -r            also match urls without a scheme (relaxed)\n")
+	}
+}
+
+func scanPath(re *regexp.Regexp, path string) error {
+	r := os.Stdin
+	if path != "-" {
+		f, err := os.Open(path)
+		if err != nil {
+			return err
+		}
+		defer f.Close()
+		r = f
+	}
+	scanner := bufio.NewScanner(r)
+	scanner.Split(bufio.ScanWords)
+	for scanner.Scan() {
+		word := scanner.Text()
+		for _, match := range re.FindAllString(word, -1) {
+			fmt.Println(match)
+		}
+	}
+	return scanner.Err()
+}
+
+func main() {
+	flag.Parse()
+	if *relaxed && *matching != "" {
+		errExit(fmt.Errorf("-r and -m at the same time don't make much sense"))
+	}
+	re := xurls.Strict
+	if *relaxed {
+		re = xurls.Relaxed
+	} else if *matching != "" {
+		var err error
+		if re, err = xurls.StrictMatchingScheme(*matching); err != nil {
+			errExit(err)
+		}
+	}
+	args := flag.Args()
+	if len(args) == 0 {
+		args = []string{"-"}
+	}
+	for _, path := range args {
+		if err := scanPath(re, path); err != nil {
+			errExit(err)
+		}
+	}
+}
+
+func errExit(err error) {
+	fmt.Fprintf(os.Stderr, "%v\n", err)
+	os.Exit(1)
+}
Godeps/_workspace/src/github.com/mvdan/xurls/generate/regexgen/main.go (generated, vendored, new file, 70 lines)

@@ -0,0 +1,70 @@
+// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
+// See LICENSE for licensing information
+
+package main
+
+import (
+	"log"
+	"os"
+	"sort"
+	"strings"
+	"text/template"
+
+	"golang.org/x/net/idna"
+
+	"github.com/mvdan/xurls"
+)
+
+const path = "regex.go"
+
+var regexTmpl = template.Must(template.New("regex").Parse(`// Generated by regexgen
+
+package xurls
+
+const ({{ range $key, $value := . }}
+	{{$key}} = ` + "`" + `{{$value}}` + "`" + `{{end}}
+)
+`))
+
+func writeRegex(tlds []string) error {
+	allTldsSet := make(map[string]struct{})
+	add := func(tld string) {
+		if _, e := allTldsSet[tld]; e {
+			log.Fatalf("Duplicate TLD: %s", tld)
+		}
+		allTldsSet[tld] = struct{}{}
+	}
+	for _, tldlist := range [...][]string{tlds, xurls.PseudoTLDs} {
+		for _, tld := range tldlist {
+			add(tld)
+			asciiTld, err := idna.ToASCII(tld)
+			if err != nil {
+				return err
+			}
+			if asciiTld != tld {
+				add(asciiTld)
+			}
+		}
+	}
+	var allTlds []string
+	for tld := range allTldsSet {
+		allTlds = append(allTlds, tld)
+	}
+	sort.Strings(allTlds)
+	f, err := os.Create(path)
+	if err != nil {
+		return err
+	}
+	defer f.Close()
+	return regexTmpl.Execute(f, map[string]string{
+		"gtld       ": `(?i)(` + strings.Join(allTlds, `|`) + `)(?-i)`,
+		"otherScheme": `(?i)(` + strings.Join(xurls.SchemesNoAuthority, `|`) + `)(?-i):`,
+	})
+}
+
+func main() {
+	log.Printf("Generating %s...", path)
+	if err := writeRegex(xurls.TLDs); err != nil {
+		log.Fatalf("Could not write %s: %v", path, err)
+	}
+}
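One detail worth calling out in regexgen above: a Go raw string literal cannot contain a backtick, so the template source splices backticks in by concatenating short interpreted strings. A minimal standalone sketch of that trick (illustrative only, not part of the commit):

```go
package main

import "fmt"

func main() {
	// A raw string (`...`) cannot contain a backtick, so the generator
	// builds one by concatenating an interpreted string: "`".
	quoted := "`" + `{{$value}}` + "`"
	fmt.Println(quoted) // prints `{{$value}}` wrapped in backticks
}
```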
Godeps/_workspace/src/github.com/mvdan/xurls/generate/tldsgen/main.go (generated, vendored, new file, 140 lines)

@@ -0,0 +1,140 @@
+// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
+// See LICENSE for licensing information
+
+package main
+
+import (
+	"bufio"
+	"errors"
+	"log"
+	"net/http"
+	"os"
+	"regexp"
+	"sort"
+	"strings"
+	"sync"
+	"text/template"
+)
+
+const path = "tlds.go"
+
+var tldsTmpl = template.Must(template.New("tlds").Parse(`// Generated by tldsgen
+
+package xurls
+
+// TLDs is a sorted list of all public top-level domains.
+//
+// Sources:{{range $_, $url := .URLs}}
+//  * {{$url}}{{end}}
+var TLDs = []string{
+{{range $_, $tld := .TLDs}}` + "\t`" + `{{$tld}}` + "`" + `,
+{{end}}}
+`))
+
+func cleanTld(tld string) string {
+	tld = strings.ToLower(tld)
+	if strings.HasPrefix(tld, "xn--") {
+		return ""
+	}
+	return tld
+}
+
+func fetchFromURL(url, pat string) {
+	defer wg.Done()
+	log.Printf("Fetching %s", url)
+	resp, err := http.Get(url)
+	if err == nil && resp.StatusCode >= 400 {
+		err = errors.New(resp.Status)
+	}
+	if err != nil {
+		errChan <- err
+		return
+	}
+	defer resp.Body.Close()
+	scanner := bufio.NewScanner(resp.Body)
+	re := regexp.MustCompile(pat)
+	for scanner.Scan() {
+		line := scanner.Text()
+		tld := re.FindString(line)
+		tld = cleanTld(tld)
+		if tld == "" {
+			continue
+		}
+		tldChan <- tld
+	}
+	if err := scanner.Err(); err != nil {
+		errChan <- err
+	}
+}
+
+var (
+	wg      sync.WaitGroup
+	tldChan = make(chan string)
+	errChan = make(chan error)
+)
+
+func tldList() ([]string, []string, error) {
+	var urls []string
+	fromURL := func(url, pat string) {
+		urls = append(urls, url)
+		wg.Add(1)
+		go fetchFromURL(url, pat)
+	}
+	fromURL("https://data.iana.org/TLD/tlds-alpha-by-domain.txt",
+		`^[^#]+$`)
+	fromURL("https://publicsuffix.org/list/effective_tld_names.dat",
+		`^[^/.]+$`)
+
+	tldSet := make(map[string]struct{})
+	anyError := false
+	go func() {
+		for {
+			select {
+			case tld := <-tldChan:
+				tldSet[tld] = struct{}{}
+			case err := <-errChan:
+				log.Printf("%v", err)
+				anyError = true
+			}
+		}
+	}()
+	wg.Wait()
+
+	if anyError {
+		return nil, nil, errors.New("there were some errors while fetching the TLDs")
+	}
+
+	tlds := make([]string, 0, len(tldSet))
+	for tld := range tldSet {
+		tlds = append(tlds, tld)
+	}
+
+	sort.Strings(tlds)
+	return tlds, urls, nil
+}
+
+func writeTlds(tlds, urls []string) error {
+	f, err := os.Create(path)
+	if err != nil {
+		return err
+	}
+	defer f.Close()
+	return tldsTmpl.Execute(f, struct {
+		TLDs []string
+		URLs []string
+	}{
+		TLDs: tlds,
+		URLs: urls,
+	})
+}
+
+func main() {
+	tlds, urls, err := tldList()
+	if err != nil {
+		log.Fatalf("Could not get TLD list: %v", err)
+	}
+	log.Printf("Generating %s...", path)
+	if err := writeTlds(tlds, urls); err != nil {
+		log.Fatalf("Could not write path: %v", err)
+	}
+}
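tldsgen fetches both TLD lists concurrently and funnels results into one collector over unbuffered channels, with a WaitGroup marking when the producers finish. A distilled sketch of that fan-in shape (illustrative, with toy data instead of HTTP; it closes the channel after Wait rather than looping in a select forever, but the data flow is the same):

```go
package main

import (
	"fmt"
	"sort"
	"sync"
)

// Fan-in: several producers send into one channel; the channel is closed
// once every producer is done, so the consumer's range loop terminates.
func main() {
	var wg sync.WaitGroup
	results := make(chan string)
	for _, src := range [][]string{{"com", "org"}, {"net", "com"}} {
		wg.Add(1)
		go func(items []string) {
			defer wg.Done()
			for _, it := range items {
				results <- it
			}
		}(src)
	}
	go func() {
		wg.Wait()
		close(results)
	}()
	set := make(map[string]struct{}) // deduplicate, like tldSet above
	for r := range results {
		set[r] = struct{}{}
	}
	tlds := make([]string, 0, len(set))
	for t := range set {
		tlds = append(tlds, t)
	}
	sort.Strings(tlds)
	fmt.Println(tlds) // [com net org]
}
```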
Godeps/_workspace/src/github.com/mvdan/xurls/regex.go (generated, vendored, new file, 8 lines)

File diff suppressed because one or more lines are too long
Godeps/_workspace/src/github.com/mvdan/xurls/schemes.go (generated, vendored, new file, 14 lines)

@@ -0,0 +1,14 @@
+package xurls
+
+// SchemesNoAuthority is a sorted list of some well-known url schemes that are
+// followed by ":" instead of "://". Since these are more prone to false
+// positives, we limit their matching.
+var SchemesNoAuthority = []string{
+	`bitcoin`, // Bitcoin
+	`file`,    // Files
+	`magnet`,  // Torrent magnets
+	`mailto`,  // Mail
+	`sms`,     // SMS
+	`tel`,     // Telephone
+	`xmpp`,    // XMPP
+}
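These schemes feed the generated otherScheme constant (built by regexgen above), which is what lets Strict match scheme-only URLs that have no "://" authority part. A small illustrative sketch, assuming the package is importable as github.com/mvdan/xurls:

```go
package main

import (
	"fmt"

	"github.com/mvdan/xurls"
)

func main() {
	// "mailto" is in SchemesNoAuthority, so Strict accepts it with a
	// bare ":" even though there is no "://" authority part.
	fmt.Println(xurls.Strict.FindString("write to mailto:gopher@golang.org today"))
	// expected output: mailto:gopher@golang.org
}
```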
Godeps/_workspace/src/github.com/mvdan/xurls/tlds.go (generated, vendored, new file, 1555 lines)

File diff suppressed because it is too large
Godeps/_workspace/src/github.com/mvdan/xurls/tlds_pseudo.go (generated, vendored, new file, 22 lines)

@@ -0,0 +1,22 @@
+package xurls
+
+// PseudoTLDs is a sorted list of some widely used unofficial TLDs.
+//
+// Sources:
+//  * https://en.wikipedia.org/wiki/Pseudo-top-level_domain
+//  * https://en.wikipedia.org/wiki/Category:Pseudo-top-level_domains
+//  * https://tools.ietf.org/html/draft-grothoff-iesg-special-use-p2p-names-00
+//  * https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml
+var PseudoTLDs = []string{
+	`bit`,       // Namecoin
+	`example`,   // Example domain
+	`exit`,      // Tor exit node
+	`gnu`,       // GNS by public key
+	`i2p`,       // I2P network
+	`invalid`,   // Invalid domain
+	`local`,     // Local network
+	`localhost`, // Local network
+	`onion`,     // Tor hidden services
+	`test`,      // Test domain
+	`zkey`,      // GNS domain name
+}
Godeps/_workspace/src/github.com/mvdan/xurls/xurls.go (generated, vendored, new file, 66 lines)

@@ -0,0 +1,66 @@
+// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
+// See LICENSE for licensing information
+
+// Package xurls extracts urls from plain text using regular expressions.
+package xurls
+
+import "regexp"
+
+//go:generate go run generate/tldsgen/main.go
+//go:generate go run generate/regexgen/main.go
+
+const (
+	letter    = `\p{L}`
+	number    = `\p{N}`
+	iriChar   = letter + number
+	currency  = `\p{Sc}`
+	otherSymb = `\p{So}`
+	endChar   = iriChar + `/\-+_&~*%=#` + currency
+	midChar   = endChar + `@.,:;'?!|` + otherSymb
+	wellParen = `\([` + midChar + `]*(\([` + midChar + `]*\)[` + midChar + `]*)*\)`
+	wellBrack = `\[[` + midChar + `]*(\[[` + midChar + `]*\][` + midChar + `]*)*\]`
+	wellBrace = `\{[` + midChar + `]*(\{[` + midChar + `]*\}[` + midChar + `]*)*\}`
+	wellAll   = wellParen + `|` + wellBrack + `|` + wellBrace
+	pathCont  = `([` + midChar + `]*(` + wellAll + `|[` + endChar + `])+)+`
+	comScheme = `[a-zA-Z][a-zA-Z.\-+]*://`
+	scheme    = `(` + comScheme + `|` + otherScheme + `)`
+
+	iri      = `[` + iriChar + `]([` + iriChar + `\-]*[` + iriChar + `])?`
+	domain   = `(` + iri + `\.)+`
+	octet    = `(25[0-5]|2[0-4][0-9]|1[0-9]{2}|[1-9][0-9]|[0-9])`
+	ipv4Addr = `\b` + octet + `\.` + octet + `\.` + octet + `\.` + octet + `\b`
+	ipv6Addr = `([0-9a-fA-F]{1,4}:([0-9a-fA-F]{1,4}:([0-9a-fA-F]{1,4}:([0-9a-fA-F]{1,4}:([0-9a-fA-F]{1,4}:[0-9a-fA-F]{0,4}|:[0-9a-fA-F]{1,4})?|(:[0-9a-fA-F]{1,4}){0,2})|(:[0-9a-fA-F]{1,4}){0,3})|(:[0-9a-fA-F]{1,4}){0,4})|:(:[0-9a-fA-F]{1,4}){0,5})((:[0-9a-fA-F]{1,4}){2}|:(25[0-5]|(2[0-4]|1[0-9]|[1-9])?[0-9])(\.(25[0-5]|(2[0-4]|1[0-9]|[1-9])?[0-9])){3})|(([0-9a-fA-F]{1,4}:){1,6}|:):[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){7}:`
+	ipAddr   = `(` + ipv4Addr + `|` + ipv6Addr + `)`
+	site     = domain + gtld
+	hostName = `(` + site + `|` + ipAddr + `)`
+	port     = `(:[0-9]*)?`
+	path     = `(/|/` + pathCont + `?|\b|$)`
+	webURL   = hostName + port + path
+
+	strict  = `(\b` + scheme + pathCont + `)`
+	relaxed = `(` + strict + `|` + webURL + `)`
+)
+
+var (
+	// Relaxed matches all the urls it can find.
+	Relaxed = regexp.MustCompile(relaxed)
+	// Strict only matches urls with a scheme to avoid false positives.
+	Strict = regexp.MustCompile(strict)
+)
+
+func init() {
+	Relaxed.Longest()
+	Strict.Longest()
+}
+
+// StrictMatchingScheme produces a regexp that matches urls like Strict but
+// whose scheme matches the given regular expression.
+func StrictMatchingScheme(exp string) (*regexp.Regexp, error) {
+	strictMatching := `(\b(?i)(` + exp + `)(?-i)` + pathCont + `)`
+	re, err := regexp.Compile(strictMatching)
+	if err != nil {
+		return nil, err
+	}
+	re.Longest()
+	return re, nil
+}
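StrictMatchingScheme is what backs the -m flag of cmd/xurls shown earlier: it keeps Strict's path matching but restricts the scheme to a caller-supplied pattern. A hedged usage sketch (the sample text and expected output are mine, not from the commit):

```go
package main

import (
	"fmt"
	"log"

	"github.com/mvdan/xurls"
)

func main() {
	// Only report links whose scheme is http(s) or mailto, as with
	// `xurls -m 'https?://|mailto:'`.
	re, err := xurls.StrictMatchingScheme(`https?://|mailto:`)
	if err != nil {
		log.Fatal(err)
	}
	text := "see ftp://example.com/f and https://golang.org and mailto:a@b.org"
	fmt.Println(re.FindAllString(text, -1))
	// expected output: [https://golang.org mailto:a@b.org]
}
```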
Makefile (1 line changed)

@@ -49,7 +49,6 @@ verify:
 	hack/verify-generated-deep-copies.sh
 	hack/verify-generated-docs.sh
 	hack/verify-swagger-spec.sh
-	hack/verify-linkcheck.sh
 	hack/verify-flags-underscore.py
 	hack/verify-godeps.sh $(BRANCH)
 	hack/verify-godep-licenses.sh $(BRANCH)
cmd/linkcheck/links.go (file name inferred from the tool being rewritten; the header was dropped by this rendering)

@@ -14,56 +14,155 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-// This command checks if the hyperlinks in files are valid. It checks the files
-// with 'fileSuffix' in 'rootDir' for URLs that match 'prefix'. It trims the
-// 'prefix' from the URL, uses what's left as the relative path to repoRoot to
-// verify if the link is valid. For example:
-// $ linkcheck --root-dir=${TYPEROOT} --repo-root=${KUBE_ROOT} \
-// --file-suffix=types.go --prefix=http://releases.k8s.io/HEAD
+// This tool extracts the links from types.go and .md files, visits the link and
+// checks the status code of the response.
+// Usage:
+// $ linkcheck --root-dir=${ROOT}
 
 package main
 
 import (
 	"fmt"
 	"io/ioutil"
+	"net/http"
 	"os"
-	"path"
 	"path/filepath"
 	"regexp"
+	"strconv"
 	"strings"
+	"time"
+
+	"github.com/mvdan/xurls"
 	flag "github.com/spf13/pflag"
 )
 
 var (
-	httpRE *regexp.Regexp
-
 	rootDir    = flag.String("root-dir", "", "Root directory containing documents to be processed.")
-	repoRoot   = flag.String("repo-root", "", `Root directory of k8s repository.`)
-	fileSuffix = flag.String("file-suffix", "", "suffix of files to be checked")
-	prefix     = flag.String("prefix", "", "Longest common prefix of the link URL, e.g., http://release.k8s.io/HEAD/ for links in pkg/api/types.go")
+	fileSuffix = flag.StringSlice("file-suffix", []string{"types.go", ".md"}, "suffix of files to be checked")
+	// URLs matching the patterns in the regWhiteList won't be checked. Patterns
+	// of dummy URLs should be added to the list to avoid false alerts. Also,
+	// patterns of URLs that we don't care about can be added here to improve
+	// efficiency.
+	regWhiteList = []*regexp.Regexp{
+		regexp.MustCompile(`https://kubernetes-site\.appspot\.com`),
+		// skip url that doesn't start with an English alphabet, e.g., URLs with IP addresses.
+		regexp.MustCompile(`https?://[^A-Za-z].*`),
+		regexp.MustCompile(`https?://localhost.*`),
+	}
+	// URLs listed in the fullURLWhiteList won't be checked. This separated from
+	// the RegWhiteList to improve efficiency. This list includes dummy URLs that
+	// are hard to be generalized by a regex, and URLs that will cause false alerts.
+	fullURLWhiteList = map[string]struct{}{
+		"http://github.com/some/repo.git": {},
+		// This URL returns 404 when visited by this tool, but it works fine if visited by a browser.
+		"http://stackoverflow.com/questions/ask?tags=kubernetes": {},
+		"https://github.com/$YOUR_GITHUB_USERNAME/kubernetes.git": {},
+		"https://github.com/$YOUR_GITHUB_USERNAME/kubernetes": {},
+		"http://storage.googleapis.com/kubernetes-release/release/v${K8S_VERSION}/bin/darwin/amd64/kubectl": {},
+		// It seems this server expects certain User-Agent value, it works fine with Chrome, but returns 404 if we issue a plain cURL to it.
+		"http://supervisord.org/": {},
+		"http://kubernetes.io/vX.Y/docs": {},
+		"http://kubernetes.io/vX.Y/docs/": {},
+		"http://kubernetes.io/vX.Y/": {},
+	}
+
+	visitedURLs    = map[string]struct{}{}
+	htmlpreviewReg = regexp.MustCompile(`https://htmlpreview\.github\.io/\?`)
+	httpOrhttpsReg = regexp.MustCompile(`https?.*`)
 )
 
-func newWalkFunc(invalidLink *bool) filepath.WalkFunc {
+func newWalkFunc(invalidLink *bool, client *http.Client) filepath.WalkFunc {
 	return func(filePath string, info os.FileInfo, err error) error {
-		if !strings.HasSuffix(info.Name(), *fileSuffix) {
+		hasSuffix := false
+		for _, suffix := range *fileSuffix {
+			hasSuffix = hasSuffix || strings.HasSuffix(info.Name(), suffix)
+		}
+		if !hasSuffix {
 			return nil
 		}
 		fileBytes, err := ioutil.ReadFile(filePath)
 		if err != nil {
 			return err
 		}
 		foundInvalid := false
-		matches := httpRE.FindAllSubmatch(fileBytes, -1)
-		for _, match := range matches {
-			// match[1] should look like docs/devel/api-conventions.md
-			if _, err := os.Stat(path.Join(*repoRoot, string(match[1]))); err != nil {
-				fmt.Fprintf(os.Stderr, "Link is not valid: %s\n", string(match[0]))
+		allURLs := xurls.Strict.FindAll(fileBytes, -1)
+		fmt.Fprintf(os.Stdout, "\nChecking file %s\n", filePath)
+	URL:
+		for _, URL := range allURLs {
+			// Don't check non http/https URL
+			if !httpOrhttpsReg.Match(URL) {
+				continue
+			}
+			for _, whiteURL := range regWhiteList {
+				if whiteURL.Match(URL) {
+					continue URL
+				}
+			}
+			if _, found := fullURLWhiteList[string(URL)]; found {
+				continue
+			}
+			// remove the htmlpreview Prefix
+			processedURL := htmlpreviewReg.ReplaceAll(URL, []byte{})
+
+			// check if we have visited the URL.
+			if _, found := visitedURLs[string(processedURL)]; found {
+				continue
+			}
+			visitedURLs[string(processedURL)] = struct{}{}
+
+			retry := 0
+			const maxRetry int = 3
+			backoff := 100
+			for retry < maxRetry {
+				fmt.Fprintf(os.Stdout, "Visiting %s\n", string(processedURL))
+				// Use verb HEAD to increase efficiency. However, some servers
+				// do not handle HEAD well, so we need to try a GET to avoid
+				// false alert.
+				resp, err := client.Head(string(processedURL))
+				// URLs with mock host or mock port will cause error. If we report
+				// the error here, people need to add the mock URL to the white
+				// list every time they add a mock URL, which will be a maintenance
+				// nightmare. Hence, we decide to only report 404 to catch the
+				// cases where host and port are legit, but path is not, which
+				// is the most common mistake in our docs.
+				if err != nil {
+					break
+				}
+				if resp.StatusCode == 429 {
+					retryAfter := resp.Header.Get("Retry-After")
+					if seconds, err := strconv.Atoi(retryAfter); err != nil {
+						backoff = seconds + 10
+					}
+					fmt.Fprintf(os.Stderr, "Got %d visiting %s, retry after %d seconds.\n", resp.StatusCode, string(URL), backoff)
+					time.Sleep(time.Duration(backoff) * time.Second)
+					backoff *= 2
+					retry++
+				} else if resp.StatusCode == 404 {
+					// We only check for 404 error for now. 401, 403 errors are hard to handle.
+
+					// We need to try a GET to avoid false alert.
+					resp, err = client.Get(string(processedURL))
+					if err != nil {
+						break
+					}
+					if resp.StatusCode != 404 {
+						continue URL
+					}
+
+					foundInvalid = true
+					fmt.Fprintf(os.Stderr, "Failed: in file %s, Got %d visiting %s\n", filePath, resp.StatusCode, string(URL))
+					break
+				} else {
+					break
+				}
+			}
+			if retry == maxRetry {
 				foundInvalid = true
+				fmt.Fprintf(os.Stderr, "Failed: in file %s, still got 429 visiting %s after %d retries\n", filePath, string(URL), maxRetry)
 			}
 		}
 		if foundInvalid {
-			fmt.Fprintf(os.Stderr, "Found invalid links in %s\n", filePath)
 			*invalidLink = true
 		}
 		return nil

@@ -72,14 +171,16 @@ func newWalkFunc(invalidLink *bool) filepath.WalkFunc {
 
 func main() {
 	flag.Parse()
-	httpRE = regexp.MustCompile(*prefix + `(.*\.md)`)
 
-	if *rootDir == "" || *repoRoot == "" || *prefix == "" {
+	if *rootDir == "" {
		flag.Usage()
 		os.Exit(2)
 	}
+	client := http.Client{
+		Timeout: time.Duration(5 * time.Second),
+	}
 	invalidLink := false
-	if err := filepath.Walk(*rootDir, newWalkFunc(&invalidLink)); err != nil {
+	if err := filepath.Walk(*rootDir, newWalkFunc(&invalidLink, &client)); err != nil {
 		fmt.Fprintf(os.Stderr, "Fail: %v.\n", err)
 		os.Exit(2)
 	}
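The checking strategy above (a cheap HEAD first, a confirming GET before trusting a 404, and transport errors deliberately not reported so mock hosts in docs don't trigger alerts) distilled into a standalone helper. Names and structure here are illustrative, not from the commit, and the 429/Retry-After backoff loop is omitted:

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// isDead reports whether a URL looks genuinely broken: HEAD first for
// efficiency, then a confirming GET before trusting a 404, since some
// servers mishandle HEAD. Network errors are not treated as broken
// links, mirroring the linkcheck policy for mock hosts and ports.
func isDead(client *http.Client, url string) bool {
	resp, err := client.Head(url)
	if err != nil {
		return false
	}
	resp.Body.Close()
	if resp.StatusCode != http.StatusNotFound {
		return false
	}
	resp, err = client.Get(url)
	if err != nil {
		return false
	}
	defer resp.Body.Close()
	return resp.StatusCode == http.StatusNotFound
}

func main() {
	client := &http.Client{Timeout: 5 * time.Second}
	fmt.Println(isDead(client, "https://kubernetes.io/no-such-page-xyz"))
}
```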
docs/devel/how-to-doc.md (file name inferred from the removed link; the header was dropped by this rendering)

@@ -18,10 +18,6 @@
 If you are using a released version of Kubernetes, you should
 refer to the docs that go with that version.
 
-<strong>
-The latest 1.1.x release of this document can be found
-[here](http://releases.k8s.io/release-1.1/docs/devel/how-to-doc.md).
-
 Documentation for other releases can be found at
 [releases.k8s.io](http://releases.k8s.io).
 </strong>
docs/proposals/node-allocatable.md (file name inferred from the content; the header was dropped by this rendering, and an inline image in the first hunk's context was stripped)

@@ -65,7 +65,7 @@ reservation grows), or running multiple Kubelets on a single node.
 [image]
 
 1. **Node Capacity** - Already provided as
-   [`NodeStatus.Capacity`](https://htmlpreview.github.io/?https://github.com/kubernetes/kubernetes/HEAD/docs/api-reference/v1/definitions.html#_v1_nodestatus),
+   [`NodeStatus.Capacity`](https://htmlpreview.github.io/?https://github.com/kubernetes/kubernetes/blob/HEAD/docs/api-reference/v1/definitions.html#_v1_nodestatus),
    this is total capacity read from the node instance, and assumed to be constant.
 2. **System-Reserved** (proposed) - Compute resources reserved for processes which are not managed by
    Kubernetes. Currently this covers all the processes lumped together in the `/system` raw

@@ -81,7 +81,7 @@ reservation grows), or running multiple Kubelets on a single node.
 #### Allocatable
 
 Add `Allocatable` (4) to
-[`NodeStatus`](https://htmlpreview.github.io/?https://github.com/kubernetes/kubernetes/HEAD/docs/api-reference/v1/definitions.html#_v1_nodestatus):
+[`NodeStatus`](https://htmlpreview.github.io/?https://github.com/kubernetes/kubernetes/blob/HEAD/docs/api-reference/v1/definitions.html#_v1_nodestatus):
 
 ```
 type NodeStatus struct {
hack/verify-linkcheck.sh (file name inferred from the script's contents; the header was dropped by this rendering)

@@ -24,15 +24,36 @@ source "${KUBE_ROOT}/hack/lib/init.sh"
 kube::golang::setup_env
 linkcheck=$(kube::util::find-binary "linkcheck")
 
-TYPEROOT="${KUBE_ROOT}/pkg/api/"
-"${linkcheck}" "--root-dir=${TYPEROOT}" "--repo-root=${KUBE_ROOT}" "--file-suffix=types.go" "--prefix=http://releases.k8s.io/HEAD" && ret=0 || ret=$?
-if [[ $ret -eq 1 ]]; then
-  echo "links in ${TYPEROOT} is out of date."
-  exit 1
-fi
-if [[ $ret -gt 1 ]]; then
-  echo "Error running linkcheck"
-  exit 1
+kube::util::ensure-temp-dir
+OUTPUT="${KUBE_TEMP}"/linkcheck-output
+cleanup() {
+  rm -rf "${OUTPUT}"
+}
+trap "cleanup" EXIT SIGINT
+mkdir -p "$OUTPUT"
+
+APIROOT="${KUBE_ROOT}/pkg/api/"
+APISROOT="${KUBE_ROOT}/pkg/apis/"
+DOCROOT="${KUBE_ROOT}/docs/"
+ROOTS=($APIROOT $APISROOT $DOCROOT)
+found_invalid=false
+for root in "${ROOTS[@]}"; do
+  "${linkcheck}" "--root-dir=${root}" 2> >(tee -a "${OUTPUT}/error" >&2) && ret=0 || ret=$?
+  if [[ $ret -eq 1 ]]; then
+    echo "Failed: found invalid links in ${root}."
+    found_invalid=true
+  fi
+  if [[ $ret -gt 1 ]]; then
+    echo "Error running linkcheck"
+    exit 1
+  fi
+done
+
+if [ ${found_invalid} = true ]; then
+  echo "Summary of invalid links:"
+  cat ${OUTPUT}/error
 fi
+
+trap "cleanup" EXIT SIGINT
 
 # ex: ts=2 sw=2 et filetype=sh
(a hack/ verify driver script that loops over hack/verify-*.sh; its file header was dropped by this rendering)

@@ -60,7 +60,7 @@ if $SILENT ; then
 fi
 
 # remove protobuf until it is part of direct generation
-EXCLUDE="verify-godeps.sh verify-godep-licenses.sh verify-generated-protobuf.sh"
+EXCLUDE="verify-godeps.sh verify-godep-licenses.sh verify-generated-protobuf.sh verify-linkcheck.sh"
 
 ret=0
 for t in `ls $KUBE_ROOT/hack/verify-*.sh`
hooks/pre-commit (file name inferred from the colored check output; the header was dropped by this rendering)

@@ -122,16 +122,6 @@ else
 fi
 echo "${reset}"
 
-echo -ne "Checking for links in API descriptions... "
-if ! hack/after-build/verify-linkcheck.sh > /dev/null; then
-  echo "${red}ERROR!"
-  echo "Some links in pkg/api/.*types.go are outdated. They require a manual fix."
-  exit_code=1
-else
-  echo "${green}OK"
-fi
-echo "${reset}"
-
 echo -ne "Checking for docs that need updating... "
 if ! hack/after-build/verify-generated-docs.sh > /dev/null; then
 	echo "${red}ERROR!"
.travis.yml (file name inferred from the install: list; the header was dropped by this rendering)

@@ -56,7 +56,6 @@ install:
   - ./hack/verify-generated-docs.sh
   - ./hack/verify-generated-swagger-docs.sh
   - ./hack/verify-swagger-spec.sh
-  - ./hack/verify-linkcheck.sh
 
 script:
   # Disable coverage collection on pull requests