Merge pull request #8288 from cmaf/migrate-static-checks

Migrate static checks
This commit is contained in:
Chelsea Mafrica 2023-11-30 17:44:16 -08:00 committed by GitHub
commit 818b8f93b1
68 changed files with 8438 additions and 0 deletions

2
tests/.gitignore vendored
View File

@ -1 +1,3 @@
cmd/check-markdown/kata-check-markdown
cmd/github-labels/kata-github-labels
integration/kubernetes/runtimeclass_workloads_work/

33
tests/.golangci.yml Normal file
View File

@ -0,0 +1,33 @@
# Copyright (c) 2017 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
run:
concurrency: 4
deadline: 600s
skip-dirs:
- vendor
# Ignore auto-generated protobuf code.
skip-files:
- ".*\\.pb\\.go$"
linters:
disable-all: true
enable:
- gocyclo
- gofmt
- gosimple
- govet
- ineffassign
- misspell
- staticcheck
- typecheck
- unused
linters-settings:
gocyclo:
min-complexity: 15
unused:
check-exported: true
govet:
enable:

View File

@ -0,0 +1,32 @@
#
# Copyright (c) 2017-2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
TARGET = kata-check-markdown
SOURCES = $(shell find . -type f 2>&1 | grep -E '.*\.go$$')
VERSION := ${shell cat ./VERSION}
COMMIT_NO := $(shell git rev-parse HEAD 2> /dev/null || true)
COMMIT := $(if $(shell git status --porcelain --untracked-files=no),"${COMMIT_NO}-dirty","${COMMIT_NO}")
BINDIR := $(GOPATH)/bin
DESTTARGET := $(abspath $(BINDIR)/$(TARGET))
default: install
check: $(SOURCES)
go test -v ./...
$(TARGET): $(SOURCES)
go build -o "$(TARGET)" -ldflags "-X main.name=${TARGET} -X main.commit=${COMMIT} -X main.version=${VERSION}" .
install: $(TARGET)
install -d $(shell dirname $(DESTTARGET))
install $(TARGET) $(DESTTARGET)
clean:
rm -f $(TARGET)
.PHONY: install clean
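The `-ldflags "-X ..."` options above stamp the binary by overwriting package-level string variables (`main.name`, `main.version`, `main.commit`) that `main.go`, shown later in this commit, declares as empty strings. A minimal standalone sketch of the mechanism (the stamped values are assumed examples):

```go
package main

import "fmt"

// Defaults compiled into the binary; overwritten at link time with, e.g.:
//   go build -ldflags "-X main.version=0.0.1 -X main.commit=abc1234"
var (
	version = ""
	commit  = ""
)

func main() {
	fmt.Printf("version=%q commit=%q\n", version, commit)
}
```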

View File

@ -0,0 +1,57 @@
# Overview
The Kata Project comprises
[a number of GitHub repositories](https://github.com/kata-containers).
All these repositories contain documents written in
[GitHub-Flavoured Markdown](https://github.github.com/gfm)
format.
[Linking in documents is strongly encouraged](https://github.com/kata-containers/kata-containers/blob/main/docs/Documentation-Requirements.md)
but due to the number of internal and external document links, it is easy for
mistakes to be made. Also, links can become stale when one document is updated
but the documents it depends on are not.
# Tool summary
The `kata-check-markdown` tool checks a markdown document to ensure all links
within it are valid. All internal links are checked and by default all
external links are also checked. The tool is able to suggest corrections for
some errors it finds. It can also generate a TOC (table of contents).
# Usage
## Basic
```sh
$ kata-check-markdown check README.md
```
## Generate a TOC
```sh
$ kata-check-markdown toc README.md
```
## List headings
To list the document headings in the default `text` format:
```sh
$ kata-check-markdown list headings README.md
```
## List links
To list the links in a document in tab-separated format:
```sh
$ kata-check-markdown list links --format tsv README.md
```
## Full details
Lists all available options:
```sh
$ kata-check-markdown -h
```

View File

@ -0,0 +1 @@
0.0.1

View File

@ -0,0 +1,135 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"errors"
"fmt"
"path/filepath"
"strings"
"github.com/sirupsen/logrus"
)
// linkAddrToPath converts a link address into a path name.
func (d *Doc) linkAddrToPath(address string) (string, error) {
if address == "" {
return "", errors.New("need address")
}
dir := filepath.Dir(d.Name)
var file string
// An "absolute link path" like this has been specified:
//
// [Foo](/absolute-link.md)
if strings.HasPrefix(address, absoluteLinkPrefix) {
if !fileExists(docRoot) {
return "", fmt.Errorf("document root %q does not exist", docRoot)
}
file = filepath.Join(docRoot, address)
} else {
file = filepath.Join(dir, address)
}
return file, nil
}
// addHeading adds the specified heading to the document.
//
// Note that headings must be unique.
func (d *Doc) addHeading(heading Heading) error {
name := heading.Name
if name == "" {
return d.Errorf("heading name cannot be blank: %+v", heading)
}
if heading.LinkName == "" {
return d.Errorf("heading link name cannot be blank: %q (%+v)",
name, heading)
}
if heading.Level <= 0 {
return d.Errorf("heading level must be atleast 1: %q (%+v)",
name, heading)
}
if _, ok := d.Headings[name]; ok {
return d.Errorf("duplicate heading: %q (heading: %+v)",
name, heading)
}
// Potentially change the ID to handle strange characters
// supported in links by GitHub.
id, err := createHeadingID(heading.Name)
if err != nil {
return err
}
heading.LinkName = id
d.Logger.WithField("heading", fmt.Sprintf("%+v", heading)).Debug("adding heading")
d.Headings[name] = heading
return nil
}
// addLink potentially adds the specified link to the document.
//
// Note that links do not need to be unique: a document can contain
// multiple links with:
//
// - the same description and the same address.
// - the same description but with different addresses.
// - different descriptions but with the same address.
func (d *Doc) addLink(link Link) error {
addr := link.Address
if link.ResolvedPath != "" {
addr = link.ResolvedPath
}
if addr == "" {
return d.Errorf("link address cannot be blank: %+v", link)
}
if link.Type == unknownLink {
return d.Errorf("BUG: link type invalid: %+v", link)
}
// Not checked by default as magic "build status" / go report / godoc
// links don't have a description - they have an image only.
if strict && link.Description == "" {
return d.Errorf("link description cannot be blank: %q (%+v)",
addr, link)
}
fields := logrus.Fields{
"link": fmt.Sprintf("%+v", link),
}
links := d.Links[addr]
for _, l := range links {
if l.Type == link.Type {
d.Logger.WithFields(fields).Debug("not adding duplicate link")
return nil
}
}
d.Logger.WithFields(fields).Debug("adding link")
links = append(links, link)
d.Links[addr] = links
return nil
}
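As a rough standalone illustration of the `linkAddrToPath()` rule above (the paths are assumed examples, not the tool's code): an address starting with the absolute-link prefix is resolved against the document root, while anything else is resolved against the referencing document's directory.

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	docRoot := "/repo"         // assumed document root
	docName := "docs/guide.md" // assumed document containing the links

	for _, addr := range []string{"/absolute-link.md", "relative.md"} {
		var file string
		if strings.HasPrefix(addr, "/") {
			// "Absolute" links are anchored at the document root.
			file = filepath.Join(docRoot, addr)
		} else {
			// Everything else is relative to the referencing document.
			file = filepath.Join(filepath.Dir(docName), addr)
		}
		fmt.Printf("%s -> %s\n", addr, file)
	}
}
```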

View File

@ -0,0 +1,191 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"os"
"path/filepath"
"testing"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert"
)
const (
testFileMode = os.FileMode(0640)
testDirMode = os.FileMode(0750)
readmeName = "README.md"
)
func createFile(file, contents string) error {
return os.WriteFile(file, []byte(contents), testFileMode)
}
// makeDirs creates two directories below the specified base directory: one is
// an empty directory named emptyDirName and the other is named readmeDirName
// and contains a markdown file called "README.md".
func makeDirs(assert *assert.Assertions, baseDir string, readmeDirName, emptyDirName string) {
readmeDir := filepath.Join(baseDir, readmeDirName)
err := os.MkdirAll(readmeDir, testDirMode)
assert.NoError(err)
readme := filepath.Join(readmeDir, "README.md")
err = createFile(readme, "# hello")
assert.NoError(err)
emptyDir := filepath.Join(baseDir, emptyDirName)
err = os.MkdirAll(emptyDir, testDirMode)
assert.NoError(err)
}
func TestDocAddHeading(t *testing.T) {
assert := assert.New(t)
type testData struct {
heading Heading
expectError bool
}
data := []testData{
{Heading{"", "", "", -1}, true},
{Heading{"Foo", "", "", -1}, true},
{Heading{"Foo", "", "", 0}, true},
{Heading{"Foo", "", "", 1}, true},
{Heading{"Foo", "", "foo", -1}, true},
{Heading{"Foo", "", "foo", 0}, true},
{Heading{"Foo", "", "foo", 1}, false},
{Heading{"`Foo`", "`Foo`", "foo", 1}, false},
}
logger := logrus.WithField("test", "true")
for i, d := range data {
doc := newDoc("foo", logger)
assert.Empty(doc.Headings)
msg := fmt.Sprintf("test[%d]: %+v\n", i, d)
err := doc.addHeading(d.heading)
if d.expectError {
assert.Error(err, msg)
continue
}
assert.NoError(err, msg)
assert.NotEmpty(doc.Headings, msg)
name := d.heading.Name
result, ok := doc.Headings[name]
assert.True(ok, msg)
assert.Equal(d.heading, result, msg)
}
}
func TestDocAddLink(t *testing.T) {
assert := assert.New(t)
type testData struct {
link Link
expectError bool
}
data := []testData{
{Link{nil, "", "", "", -1}, true},
{Link{nil, "foo", "", "", unknownLink}, true},
{Link{nil, "foo", "", "", internalLink}, false},
{Link{nil, "http://google.com", "", "", urlLink}, false},
{Link{nil, "https://google.com", "", "", urlLink}, false},
{Link{nil, "mailto:me@somewhere.com", "", "", mailLink}, false},
}
logger := logrus.WithField("test", "true")
for i, d := range data {
doc := newDoc("foo", logger)
assert.Empty(doc.Links)
msg := fmt.Sprintf("test[%d]: %+v\n", i, d)
err := doc.addLink(d.link)
if d.expectError {
assert.Error(err, msg)
continue
}
assert.NoError(err, msg)
assert.NotEmpty(doc.Links, msg)
addr := d.link.Address
result := doc.Links[addr][0]
assert.Equal(result, d.link)
}
}
func TestDocLinkAddrToPath(t *testing.T) {
assert := assert.New(t)
dir, err := os.MkdirTemp("", "")
assert.NoError(err)
cwd, err := os.Getwd()
assert.NoError(err)
defer os.Chdir(cwd)
err = os.Chdir(dir)
assert.NoError(err)
defer os.RemoveAll(dir)
savedDocRoot := docRoot
docRoot = dir
defer func() {
docRoot = savedDocRoot
}()
mdFile := "bar.md"
mdPath := filepath.Join("/", mdFile)
actualMDPath := filepath.Join(dir, mdFile)
type testData struct {
linkAddr string
expectedPath string
expectError bool
}
data := []testData{
{"", "", true},
{"bar", "bar", false},
{"bar.md", "bar.md", false},
{mdPath, actualMDPath, false},
}
logger := logrus.WithField("test", "true")
doc := newDoc("foo", logger)
for i, d := range data {
msg := fmt.Sprintf("test[%d]: %+v\n", i, d)
result, err := doc.linkAddrToPath(d.linkAddr)
if d.expectError {
assert.Error(err, msg)
continue
}
assert.NoError(err, msg)
assert.Equal(d.expectedPath, result)
}
}

View File

@ -0,0 +1,118 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"errors"
"fmt"
)
// checkLink checks the validity of the specified link. If checkOtherDoc is
// true and the link is an external one, validate the link by considering the
// external document too.
func (d *Doc) checkLink(address string, link Link, checkOtherDoc bool) error {
if address == "" {
return errors.New("link address not set")
}
switch link.Type {
case externalFile:
fallthrough
case externalLink:
// Check to ensure that referenced file actually exists
var file string
if link.ResolvedPath != "" {
file = link.ResolvedPath
} else {
file, _, err := splitLink(address)
if err != nil {
return err
}
file, err = d.linkAddrToPath(file)
if err != nil {
return err
}
if !fileExists(file) {
return d.Errorf("link type %v invalid: %q does not exist",
link.Type,
file)
}
}
if link.Type == externalFile {
break
}
// Check the other document
other, err := getDoc(file, d.Logger)
if err != nil {
return err
}
if !checkOtherDoc {
break
}
_, section, err := splitLink(address)
if err != nil {
return err
}
if section == "" {
break
}
if !other.hasHeading(section) {
return other.Errorf("invalid link %v", address)
}
case internalLink:
// must be a link to an existing heading
// search for a heading whose LinkName == name
found := d.headingByLinkName(address)
if found == nil {
msg := fmt.Sprintf("failed to find heading for link %q (%+v)", address, link)
// There is a chance the link description matches the
// correct heading the link address refers to. In
// which case, we can derive the correct link address!
suggestion, err2 := createHeadingID(link.Description)
if err2 == nil && suggestion != link.Address {
found = d.headingByLinkName(suggestion)
if found != nil {
msg = fmt.Sprintf("%s - correct link name is %q", msg, suggestion)
}
}
return d.Errorf("%s", msg)
}
case urlLink:
// NOP - handled by xurls
}
return nil
}
// check performs all checks on the document.
func (d *Doc) check() error {
for name, linkList := range d.Links {
for _, link := range linkList {
err := d.checkLink(name, link, false)
if err != nil {
return err
}
}
}
return nil
}

View File

@ -0,0 +1,102 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"os"
"sort"
"github.com/sirupsen/logrus"
)
var outputFile = os.Stdout
// DisplayHandler is an interface that all output display handlers
// (formatters) must implement.
type DisplayHandler interface {
DisplayHeadings(d *Doc) error
DisplayLinks(d *Doc) error
}
// DisplayHandlers encapsulates the list of available display handlers.
type DisplayHandlers struct {
handlers map[string]DisplayHandler
}
// handlers is a map of the available output format display handling
// implementations.
var handlers map[string]DisplayHandler
// NewDisplayHandlers creates a new set of DisplayHandlers.
func NewDisplayHandlers(tsvSeparator string, disableHeader bool) *DisplayHandlers {
separator := rune('\t')
if tsvSeparator != "" {
separator = rune(tsvSeparator[0])
}
if handlers == nil {
handlers = make(map[string]DisplayHandler)
handlers[textFormat] = NewDisplayText(outputFile)
handlers[tsvFormat] = NewDisplayTSV(outputFile, separator, disableHeader)
}
h := &DisplayHandlers{
handlers: handlers,
}
return h
}
// find looks for a display handler corresponding to the specified format
func (d *DisplayHandlers) find(format string) DisplayHandler {
for f, handler := range d.handlers {
if f == format {
return handler
}
}
return nil
}
// Get returns a list of the available formatters (display handler names).
func (d *DisplayHandlers) Get() []string {
var formats []string
for f := range d.handlers {
formats = append(formats, f)
}
sort.Strings(formats)
return formats
}
func show(inputFilename string, logger *logrus.Entry, handler DisplayHandler, what DataToShow) error {
var fn func(*Doc) error
switch what {
case showHeadings:
fn = handler.DisplayHeadings
case showLinks:
fn = handler.DisplayLinks
default:
return fmt.Errorf("unknown show option: %v", what)
}
doc := newDoc(inputFilename, logger)
doc.ListMode = true
err := doc.parse()
if err != nil {
return err
}
return fn(doc)
}
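The text and TSV formatters that follow both plug into this `DisplayHandler` interface through `NewDisplayHandlers()`. As a purely hypothetical sketch (not part of this commit) of how a further format could be added, here is a trimmed-down, standalone JSON handler; the `Doc`, `Heading` and `Link` types are simplified stand-ins for the real ones defined elsewhere in this commit:

```go
package main

import (
	"encoding/json"
	"os"
)

// Simplified stand-ins for the package's real types (assumptions for this sketch).
type Heading struct {
	Name     string
	LinkName string
	Level    int
}

type Link struct {
	Address     string
	Description string
}

type Doc struct {
	Headings map[string]Heading
	Links    map[string][]Link
}

type DisplayHandler interface {
	DisplayHeadings(d *Doc) error
	DisplayLinks(d *Doc) error
}

// displayJSON satisfies DisplayHandler by JSON-encoding the document data.
type displayJSON struct {
	file *os.File
}

func (j *displayJSON) DisplayHeadings(d *Doc) error {
	return json.NewEncoder(j.file).Encode(d.Headings)
}

func (j *displayJSON) DisplayLinks(d *Doc) error {
	return json.NewEncoder(j.file).Encode(d.Links)
}

func main() {
	doc := &Doc{
		Headings: map[string]Heading{
			"Overview": {Name: "Overview", LinkName: "overview", Level: 1},
		},
		Links: map[string][]Link{
			"README.md": {{Address: "README.md", Description: "the readme"}},
		},
	}

	var h DisplayHandler = &displayJSON{file: os.Stdout}
	_ = h.DisplayHeadings(doc)
	_ = h.DisplayLinks(doc)
}
```

In the real package such a handler would simply be registered in `NewDisplayHandlers()` next to the text and TSV entries.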

View File

@ -0,0 +1,57 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"os"
)
type displayText struct {
file *os.File
}
func NewDisplayText(file *os.File) DisplayHandler {
return &displayText{
file: file,
}
}
func (d *displayText) DisplayLinks(doc *Doc) error {
for _, linkList := range doc.Links {
for _, link := range linkList {
err := d.displayLink(link)
if err != nil {
return err
}
}
}
return nil
}
func (d *displayText) displayLink(l Link) error {
_, err := fmt.Fprintf(d.file, "%+v\n", l)
return err
}
func (d *displayText) DisplayHeadings(doc *Doc) error {
for _, h := range doc.Headings {
err := d.displayHeading(h)
if err != nil {
return err
}
}
return nil
}
func (d *displayText) displayHeading(h Heading) error {
_, err := fmt.Fprintf(d.file, "%+v\n", h)
return err
}

View File

@ -0,0 +1,72 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"encoding/csv"
"os"
)
type displayTSV struct {
writer *csv.Writer
disableHeader bool
}
func NewDisplayTSV(file *os.File, separator rune, disableHeader bool) DisplayHandler {
tsv := &displayTSV{
disableHeader: disableHeader,
}
tsv.writer = csv.NewWriter(file)
tsv.writer.Comma = separator
return tsv
}
func (d *displayTSV) DisplayLinks(doc *Doc) error {
if !d.disableHeader {
record := linkHeaderRecord()
if err := d.writer.Write(record); err != nil {
return err
}
}
for _, linkList := range doc.Links {
for _, link := range linkList {
record := linkToRecord(link)
if err := d.writer.Write(record); err != nil {
return err
}
}
}
d.writer.Flush()
return d.writer.Error()
}
func (d *displayTSV) DisplayHeadings(doc *Doc) error {
if !d.disableHeader {
record := headingHeaderRecord()
if err := d.writer.Write(record); err != nil {
return err
}
}
for _, l := range doc.Headings {
record := headingToRecord(l)
if err := d.writer.Write(record); err != nil {
return err
}
}
d.writer.Flush()
return d.writer.Error()
}
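For reference, the handler above gets its TSV behaviour entirely from the standard `encoding/csv` writer with `Comma` switched to a tab. A standalone sketch with assumed sample records (the header mirrors `linkHeaderRecord()` shown later in this commit):

```go
package main

import (
	"encoding/csv"
	"os"
)

func main() {
	w := csv.NewWriter(os.Stdout)
	w.Comma = '\t' // a tab separator turns the CSV writer into a TSV writer

	// Assumed sample records.
	_ = w.Write([]string{"Document", "Address", "Path", "Description", "Type"})
	_ = w.Write([]string{"README.md", "#overview", "", "Overview section", "internal-link"})

	w.Flush()
	if err := w.Error(); err != nil {
		panic(err)
	}
}
```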

View File

@ -0,0 +1,76 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"errors"
"fmt"
"github.com/sirupsen/logrus"
)
// Details of the main document, and all other documents it references.
// Key: document name.
var docs map[string]*Doc
func init() {
docs = make(map[string]*Doc)
}
// newDoc creates a new document.
func newDoc(name string, logger *logrus.Entry) *Doc {
d := &Doc{
Name: name,
Headings: make(map[string]Heading),
Links: make(map[string][]Link),
Parsed: false,
ShowTOC: false,
Logger: logger,
}
d.Logger = logger.WithField("file", d.Name)
// add to the hash
docs[name] = d
return d
}
// getDoc returns the Doc structure represented by the specified name,
// creating it and adding it to the docs map if necessary.
func getDoc(name string, logger *logrus.Entry) (*Doc, error) {
if name == "" {
return &Doc{}, errors.New("need doc name")
}
doc, ok := docs[name]
if ok {
return doc, nil
}
return newDoc(name, logger), nil
}
// hasHeading returns true if the specified heading exists for the document.
func (d *Doc) hasHeading(name string) bool {
return d.heading(name) != nil
}
// Errorf is a convenience function to generate an error for this particular
// document.
func (d *Doc) Errorf(format string, args ...interface{}) error {
s := fmt.Sprintf(format, args...)
return fmt.Errorf("file=%q: %s", d.Name, s)
}
// String "pretty-prints" the specified document
//
// Just display the name as that is enough in text output.
func (d *Doc) String() string {
return d.Name
}

View File

@ -0,0 +1,93 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
bf "gopkg.in/russross/blackfriday.v2"
)
// linkDescription extracts the description from the specified link node.
func linkDescription(l *bf.Node) (string, error) {
if err := checkNode(l, bf.Link); err != nil {
return "", err
}
// A link description can be comprised of various elements so scan
// through them to build up the final value.
text := ""
node := l.FirstChild
for node != nil {
switch node.Type {
case bf.Code:
text += string(node.Literal)
case bf.Text:
text += string(node.Literal)
default:
logger.WithField("node", node).Debug("ignoring node")
}
if node == l.LastChild {
break
}
node = node.Next
}
return text, nil
}
// headingName extracts the heading name from the specified Heading node in
// plain text and in markdown. The latter is used for creating TOCs, which need
// to include the original markdown value.
func headingName(h *bf.Node) (name, mdName string, err error) {
if err = checkNode(h, bf.Heading); err != nil {
return "", "", err
}
// A heading can be comprised of various elements so scan
// through them to build up the final value.
node := h.FirstChild
for node != nil {
switch node.Type {
case bf.Code:
value := string(node.Literal)
name += value
mdName += fmt.Sprintf("`%s`", value)
case bf.Text:
value := string(node.Literal)
name += value
mdName += value
case bf.Link:
// yep, people do crazy things like adding links into titles!
descr, err := linkDescription(node)
if err != nil {
return "", "", err
}
name += descr
mdName += descr
default:
logger.WithField("node", node).Debug("ignoring node")
}
if node == h.LastChild {
break
}
node = node.Next
}
return name, mdName, nil
}

View File

@ -0,0 +1,69 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"strings"
bf "gopkg.in/russross/blackfriday.v2"
)
// forceCreateHeadings extracts "missed" headings from the specified node,
// returning a slice of the newly created headings (which need to be added by the
// caller).
//
// Alas, Black Friday isn't 100% reliable...
func (d *Doc) forceCreateHeadings(node *bf.Node) ([]Heading, error) {
if err := checkNode(node, bf.Text); err != nil {
return []Heading{}, err
}
chunk := string(node.Literal)
if chunk == "" {
// No text in this node
return []Heading{}, nil
}
lines := strings.Split(chunk, "\n")
if len(lines) <= 1 {
// No headings lurking in this text node
return []Heading{}, nil
}
var headings []Heading
for _, line := range lines {
if !strings.HasPrefix(line, anchorPrefix) {
continue
}
fields := strings.Split(line, anchorPrefix)
name := strings.Join(fields, "")
name = strings.TrimSpace(name)
count := strings.Count(line, anchorPrefix)
heading := Heading{
Name: name,
Level: count,
}
id, err := createHeadingID(heading.Name)
if err != nil {
return []Heading{}, err
}
heading.LinkName = id
headings = append(headings, heading)
extraHeadings++
}
return headings, nil
}
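A rough standalone illustration of the heuristic above (the text chunk is an assumed example, not the tool's code): lines starting with the anchor prefix are pulled out of the text node and their level is inferred from the number of `#` characters.

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// A text chunk that blackfriday failed to parse into heading nodes.
	chunk := "## A missed heading\nsome plain text\n### Another missed heading"

	for _, line := range strings.Split(chunk, "\n") {
		if !strings.HasPrefix(line, "#") {
			continue
		}
		level := strings.Count(line, "#")
		name := strings.TrimSpace(strings.Join(strings.Split(line, "#"), ""))
		fmt.Printf("level=%d name=%q\n", level, name)
	}
}
```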

View File

@ -0,0 +1,36 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import "fmt"
// newHeading creates a new Heading.
func newHeading(name, mdName string, level int) (Heading, error) {
if name == "" {
return Heading{}, fmt.Errorf("heading name cannot be blank")
}
if mdName == "" {
return Heading{}, fmt.Errorf("heading markdown name cannot be blank")
}
linkName, err := createHeadingID(name)
if err != nil {
return Heading{}, err
}
if level < 1 {
return Heading{}, fmt.Errorf("level needs to be atleast 1")
}
return Heading{
Name: name,
MDName: mdName,
LinkName: linkName,
Level: level,
}, nil
}

View File

@ -0,0 +1,65 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
func TestNewHeading(t *testing.T) {
assert := assert.New(t)
type testData struct {
headingName string
mdName string
expectedLinkName string
level int
expectError bool
}
data := []testData{
{"", "", "", -1, true},
{"a", "", "", -1, true},
{"a", "a", "", -1, true},
{"a", "a", "", 0, true},
{"a", "", "", 1, true},
{"a", "a", "a", 1, false},
{"a-b", "`a-b`", "`a-b`", 1, false},
{"a_b", "`a_b`", "`a_b`", 1, false},
{"foo (json) bar", "foo `(json)` bar", "foo-json-bar", 1, false},
{"func(json)", "`func(json)`", "funcjson", 1, false},
{"?", "?", "", 1, false},
{"a b", "a b", "a-b", 1, false},
{"a - b", "a - b", "a---b", 1, false},
{"a - b?", "a - b?", "a---b", 1, false},
{"a - b.", "a - b.", "a---b", 1, false},
{"a:b", "a:b", "ab", 1, false},
{"a;b", "a;b", "ab", 1, false},
{"a@b", "a@b", "ab", 1, false},
{"a+b", "a+b", "ab", 1, false},
{"a,b", "a,b", "ab", 1, false},
}
for i, d := range data {
msg := fmt.Sprintf("test[%d]: %+v\n", i, d)
h, err := newHeading(d.headingName, d.mdName, d.level)
if d.expectError {
assert.Error(err, msg)
continue
}
assert.Equal(h.Name, d.headingName, msg)
assert.Equal(h.MDName, d.mdName, msg)
assert.Equal(h.Level, d.level, msg)
assert.Equal(h.LinkName, d.expectedLinkName, msg)
}
}

View File

@ -0,0 +1,122 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"errors"
"os"
"path/filepath"
"regexp"
"strings"
)
// newLink creates a new Link.
func newLink(doc *Doc, address, description string) (Link, error) {
l := Link{
Doc: doc,
Address: address,
Description: description,
}
err := l.categorise()
if err != nil {
return Link{}, err
}
return l, nil
}
// categorise determines the type of Link.
func (l *Link) categorise() error {
address := l.Address
// markdown file extension with optional link name ("#...")
const re = `\.md#*.*$`
pattern := regexp.MustCompile(re)
matched := pattern.MatchString(address)
if strings.HasPrefix(address, "http:") {
l.Type = urlLink
} else if strings.HasPrefix(address, "https:") {
l.Type = urlLink
} else if strings.HasPrefix(address, "mailto:") {
l.Type = mailLink
} else if strings.HasPrefix(address, anchorPrefix) {
l.Type = internalLink
// Remove the prefix to make a valid link address
address = strings.TrimPrefix(address, anchorPrefix)
l.Address = address
} else if matched {
l.Type = externalLink
file, _, err := splitLink(address)
if err != nil {
return err
}
file, err = l.Doc.linkAddrToPath(file)
if err != nil {
return err
}
l.ResolvedPath = file
} else {
isREADME, err := l.handleImplicitREADME()
if err != nil {
return err
}
if !isREADME {
// Link must be an external file, but not a markdown file.
l.Type = externalFile
}
}
return nil
}
// handleImplicitREADME determines if the specified link is an implicit link
// to a README document.
func (l *Link) handleImplicitREADME() (isREADME bool, err error) {
const readme = "README.md"
address := l.Address
if address == "" {
return false, errors.New("need link address")
}
file, err := l.Doc.linkAddrToPath(address)
if err != nil {
return false, err
}
// The resolved path should exist as this is a local file.
st, err := os.Stat(file)
if err != nil {
return false, err
}
if !st.IsDir() {
return false, nil
}
// The file is a directory so try appending the implicit README file
// and see if that exists.
resolvedPath := filepath.Join(file, readme)
success := fileExists(resolvedPath)
if success {
l.Type = externalLink
l.ResolvedPath = resolvedPath
}
return success, nil
}
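For reference, the `\.md#*.*$` pattern used by `categorise()` matches addresses ending in a markdown file name with an optional section suffix. A standalone check against a few assumed sample addresses (in the real code the `http:`, `https:`, `mailto:` and `#` prefixes are tested before the pattern is consulted):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	re := regexp.MustCompile(`\.md#*.*$`)

	for _, addr := range []string{"foo.md", "foo.md#bar", "https://example.com", "#local-section"} {
		fmt.Printf("%-20s matches: %v\n", addr, re.MatchString(addr))
	}
}
```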

View File

@ -0,0 +1,209 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"os"
"path/filepath"
"testing"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert"
)
// createLinkAndCategorise will create a link and categorise it. If
// createLinkManually is set, the link will be created "manually" (without the
// constructor) and categorise() called. If not set, the constructor will be
// used.
func createLinkAndCategorise(assert *assert.Assertions, createLinkManually bool) {
dir, err := os.MkdirTemp("", "")
assert.NoError(err)
cwd, err := os.Getwd()
assert.NoError(err)
defer os.Chdir(cwd)
err = os.Chdir(dir)
assert.NoError(err)
defer os.RemoveAll(dir)
readmeDirName := "dir-with-readme"
emptyDirName := "empty"
makeDirs(assert, dir, readmeDirName, emptyDirName)
readmeDirPath := filepath.Join(readmeDirName, readmeName)
topLevelReadmeName := "top-level.md"
topLevelReadmeLink := filepath.Join("/", topLevelReadmeName)
topLevelReadmePath := filepath.Join(dir, topLevelReadmeName)
type testData struct {
linkAddress string
expectedPath string
expectedType LinkType
expectError bool
// Set if expectedPath should be checked
checkPath bool
}
docRoot = dir
data := []testData{
{"", "", -1, true, false},
{"a", "", -1, true, false},
{"a.b", "", -1, true, false},
{"a#b", "", -1, true, false},
{"htt://foo", "", -1, true, false},
{"HTTP://foo", "", -1, true, false},
{"moohttp://foo", "", -1, true, false},
{"mailto", "", -1, true, false},
{"http", "", -1, true, false},
{"https", "", -1, true, false},
{"http://foo", "", urlLink, false, false},
{"https://foo/", "", urlLink, false, false},
{"https://foo/bar", "", urlLink, false, false},
{"mailto:me", "", mailLink, false, false},
{".", "", externalFile, false, false},
{"/", "", externalFile, false, false},
{emptyDirName, "", externalFile, false, false},
{readmeDirName, readmeDirPath, externalLink, false, true},
{"foo.md", "foo.md", externalLink, false, true},
{"foo.md#bar", "foo.md", externalLink, false, true},
{topLevelReadmeLink, topLevelReadmePath, externalLink, false, true},
}
logger := logrus.WithField("test", "true")
description := ""
for i, d := range data {
var link Link
var err error
doc := newDoc("foo", logger)
if createLinkManually {
link = Link{
Doc: doc,
Address: d.linkAddress,
Description: description,
}
err = link.categorise()
} else {
link, err = newLink(doc, d.linkAddress, description)
}
msg := fmt.Sprintf("test[%d] manual-link: %v: %+v, link: %+v\n", i, createLinkManually, d, link)
if d.expectError {
assert.Error(err, msg)
continue
}
assert.NoError(err, msg)
assert.Equal(link.Doc, doc)
assert.Equal(link.Address, d.linkAddress)
assert.Equal(link.Description, description)
assert.Equal(link.Type, d.expectedType)
if d.checkPath {
assert.Equal(d.expectedPath, link.ResolvedPath)
}
}
}
func TestNewLink(t *testing.T) {
assert := assert.New(t)
createLinkAndCategorise(assert, false)
}
func TestLinkCategorise(t *testing.T) {
assert := assert.New(t)
createLinkAndCategorise(assert, true)
}
func TestLinkHandleImplicitREADME(t *testing.T) {
assert := assert.New(t)
dir, err := os.MkdirTemp("", "")
assert.NoError(err)
defer os.RemoveAll(dir)
cwd, err := os.Getwd()
assert.NoError(err)
defer os.Chdir(cwd)
err = os.Chdir(dir)
assert.NoError(err)
defer os.RemoveAll(dir)
readmeDirName := "dir-with-readme"
emptyDirName := "empty"
makeDirs(assert, dir, readmeDirName, emptyDirName)
readmePath := filepath.Join(readmeDirName, readmeName)
emptyFileName := "empty-file"
err = createFile(emptyFileName, "")
assert.NoError(err)
type testData struct {
linkAddr string
expectedPath string
expectedType LinkType
isREADME bool
expectError bool
}
data := []testData{
{"", "", unknownLink, false, true},
{"foo", "", unknownLink, false, true},
{emptyFileName, "", unknownLink, false, false},
{emptyDirName, "", unknownLink, false, false},
{readmeDirName, readmePath, externalLink, true, false},
}
logger := logrus.WithField("test", "true")
for i, d := range data {
doc := newDoc("foo", logger)
link := Link{
Doc: doc,
Address: d.linkAddr,
}
msg := fmt.Sprintf("test[%d]: %+v\n", i, d)
isREADME, err := link.handleImplicitREADME()
if d.expectError {
assert.Error(err, msg)
continue
}
assert.NoError(err, msg)
assert.Equal(isREADME, d.isREADME)
assert.Equal(link.Address, d.linkAddr)
assert.Equal(link.Type, d.expectedType)
assert.Equal(link.ResolvedPath, d.expectedPath)
}
}

View File

@ -0,0 +1,348 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"errors"
"fmt"
"os"
"time"
"github.com/sirupsen/logrus"
"github.com/urfave/cli"
)
type DataToShow int
const (
// Character used (after an optional filename) before a heading ID.
anchorPrefix = "#"
// Character used to signify an "absolute link path" which should
// expand to the value of the document root.
absoluteLinkPrefix = "/"
showLinks DataToShow = iota
showHeadings DataToShow = iota
textFormat = "text"
tsvFormat = "tsv"
defaultOutputFormat = textFormat
defaultSeparator = "\t"
)
var (
// set by the build
name = ""
version = ""
commit = ""
strict = false
// list entry character to use when generating TOCs
listPrefix = "*"
logger *logrus.Entry
errNeedFile = errors.New("need markdown file")
)
// Black Friday sometimes chokes on markdown (I know!!), so record how many
// extra headings we found.
var extraHeadings int
// Root directory used to handle "absolute link paths" that start with a slash
// to denote the "top directory", like this:
//
// [Foo](/absolute-link.md)
var docRoot string
var notes = fmt.Sprintf(`
NOTES:
- The document root is used to handle markdown references that begin with %q,
denoting that the path that follows is an "absolute path" from the specified
document root path.
- The order in which the document nodes are parsed internally is not known to
this program. This means that if multiple errors exist in the document,
running this tool multiple times will report only *one* of the errors, but not
necessarily the same one as last time.
LIMITATIONS:
- The default document root only works if this tool is run from the top-level
of a repository.
`, absoluteLinkPrefix)
var formatFlag = cli.StringFlag{
Name: "format",
Usage: "display in specified format ('help' to show all)",
Value: defaultOutputFormat,
}
var separatorFlag = cli.StringFlag{
Name: "separator",
Usage: fmt.Sprintf("use the specified separator character (%s format only)", tsvFormat),
Value: defaultSeparator,
}
var noHeaderFlag = cli.BoolFlag{
Name: "no-header",
Usage: "disable display of header (if format supports one)",
}
func init() {
logger = logrus.WithFields(logrus.Fields{
"name": name,
"source": "check-markdown",
"version": version,
"commit": commit,
"pid": os.Getpid(),
})
logger.Logger.Formatter = &logrus.TextFormatter{
TimestampFormat: time.RFC3339Nano,
//DisableColors: true,
}
// Write to stdout to avoid upsetting CI systems that consider stderr
// writes as indicating an error.
logger.Logger.Out = os.Stdout
}
func handleLogging(c *cli.Context) {
logLevel := logrus.InfoLevel
if c.GlobalBool("debug") {
logLevel = logrus.DebugLevel
}
logger.Logger.SetLevel(logLevel)
}
func handleDoc(c *cli.Context, createTOC bool) error {
handleLogging(c)
if c.NArg() == 0 {
return errNeedFile
}
fileName := c.Args().First()
if fileName == "" {
return errNeedFile
}
singleDocOnly := c.GlobalBool("single-doc-only")
doc := newDoc(fileName, logger)
doc.ShowTOC = createTOC
if createTOC {
// Only makes sense to generate a single TOC!
singleDocOnly = true
}
// Parse the main document first
err := doc.parse()
if err != nil {
return err
}
if singleDocOnly && len(docs) > 1 {
doc.Logger.Debug("Not checking referenced files at user request")
return nil
}
// Now handle all other docs that the main doc references.
// This requires care to avoid recursion.
for {
count := len(docs)
parsed := 0
for _, doc := range docs {
if doc.Parsed {
// Document has already been handled
parsed++
continue
}
if err := doc.parse(); err != nil {
return err
}
}
if parsed == count {
break
}
}
err = handleIntraDocLinks()
if err != nil {
return err
}
if !createTOC {
doc.Logger.Info("Checked file")
doc.showStats()
}
count := len(docs)
if count > 1 {
// Update to ignore main document
count--
doc.Logger.WithField("reference-document-count", count).Info("Checked referenced files")
for _, d := range docs {
if d.Name == doc.Name {
// Ignore main document
continue
}
fmt.Printf("\t%q\n", d.Name)
}
}
// Highlight blackfriday deficiencies
if !doc.ShowTOC && extraHeadings > 0 {
doc.Logger.WithField("extra-heading-count", extraHeadings).Debug("Found extra headings")
}
return nil
}
// commonListHandler is used to handle all list operations.
func commonListHandler(context *cli.Context, what DataToShow) error {
handleLogging(context)
handlers := NewDisplayHandlers(context.String("separator"), context.Bool("no-header"))
format := context.String("format")
if format == "help" {
availableFormats := handlers.Get()
for _, format := range availableFormats {
fmt.Fprintf(outputFile, "%s\n", format)
}
return nil
}
handler := handlers.find(format)
if handler == nil {
return fmt.Errorf("no handler for format %q", format)
}
if context.NArg() == 0 {
return errNeedFile
}
file := context.Args().Get(0)
return show(file, logger, handler, what)
}
func realMain() error {
cwd, err := os.Getwd()
if err != nil {
return err
}
docRoot = cwd
cli.VersionPrinter = func(c *cli.Context) {
fmt.Fprintln(os.Stdout, c.App.Version)
}
cli.AppHelpTemplate = fmt.Sprintf(`%s%s`, cli.AppHelpTemplate, notes)
app := cli.NewApp()
app.Name = name
app.Version = fmt.Sprintf("%s %s (commit %v)", name, version, commit)
app.Description = "Tool to check GitHub-Flavoured Markdown (GFM) format documents"
app.Usage = app.Description
app.UsageText = fmt.Sprintf("%s [options] file ...", app.Name)
app.Flags = []cli.Flag{
cli.BoolFlag{
Name: "debug, d",
Usage: "display debug information",
},
cli.StringFlag{
Name: "doc-root, r",
Usage: "specify document root",
Value: docRoot,
},
cli.BoolFlag{
Name: "single-doc-only, o",
Usage: "only check primary (specified) document",
},
cli.BoolFlag{
Name: "strict, s",
Usage: "enable strict mode",
},
}
app.Commands = []cli.Command{
{
Name: "check",
Usage: "perform tests on the specified document",
Description: "Exit code denotes success",
Action: func(c *cli.Context) error {
return handleDoc(c, false)
},
},
{
Name: "toc",
Usage: "display a markdown Table of Contents",
Action: func(c *cli.Context) error {
return handleDoc(c, true)
},
},
{
Name: "list",
Usage: "display particular parts of the document",
Subcommands: []cli.Command{
{
Name: "headings",
Usage: "display headings",
Flags: []cli.Flag{
formatFlag,
noHeaderFlag,
separatorFlag,
},
Action: func(c *cli.Context) error {
return commonListHandler(c, showHeadings)
},
},
{
Name: "links",
Usage: "display links",
Flags: []cli.Flag{
formatFlag,
noHeaderFlag,
separatorFlag,
},
Action: func(c *cli.Context) error {
return commonListHandler(c, showLinks)
},
},
},
},
}
return app.Run(os.Args)
}
func main() {
err := realMain()
if err != nil {
logger.Fatalf("%v", err)
}
}

View File

@ -0,0 +1,115 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
bf "gopkg.in/russross/blackfriday.v2"
)
// handleNode processes the specified node.
func (d *Doc) handleNode(node *bf.Node) error {
var err error
switch node.Type {
case bf.Heading:
err = d.handleHeading(node)
case bf.Link:
err = d.handleLink(node)
case bf.Text:
// handle blackfriday deficiencies
headings, err := d.forceCreateHeadings(node)
if err != nil {
return err
}
for _, heading := range headings {
err := d.addHeading(heading)
if err != nil {
return err
}
}
default:
return nil
}
return err
}
// makeHeading creates a heading from the specified node.
func (d *Doc) makeHeading(node *bf.Node) (Heading, error) {
if err := checkNode(node, bf.Heading); err != nil {
return Heading{}, err
}
name, mdName, err := headingName(node)
if err != nil {
return Heading{}, d.Errorf("failed to get heading name: %v", err)
}
data := node.HeadingData
heading, err := newHeading(name, mdName, data.Level)
if err != nil {
return Heading{}, err
}
return heading, nil
}
// handleHeading processes the heading represented by the specified node.
func (d *Doc) handleHeading(node *bf.Node) error {
if err := checkNode(node, bf.Heading); err != nil {
return err
}
heading, err := d.makeHeading(node)
if err != nil {
return err
}
return d.addHeading(heading)
}
func (d *Doc) handleLink(node *bf.Node) error {
if err := checkNode(node, bf.Link); err != nil {
return err
}
address := string(node.Destination)
description, err := linkDescription(node)
if err != nil {
return d.Errorf("failed to get link name: %v", err)
}
link, err := newLink(d, address, description)
if err != nil {
return err
}
return d.addLink(link)
}
// handleIntraDocLinks checks that the links between documents are correct.
//
// For example, if a document refers to "foo.md#section-bar", this function
// will ensure that "section-bar" exists in external file "foo.md".
func handleIntraDocLinks() error {
for _, doc := range docs {
for addr, linkList := range doc.Links {
for _, link := range linkList {
err := doc.checkLink(addr, link, true)
if err != nil {
return doc.Errorf("intra-doc link invalid: %v", err)
}
}
}
}
return nil
}

View File

@ -0,0 +1,100 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"os"
"strings"
bf "gopkg.in/russross/blackfriday.v2"
)
// List of errors found by visitor. Used as the visitor cannot return an error
// directly.
var errorList []error
func (d *Doc) parse() error {
if !d.ShowTOC && !d.ListMode {
d.Logger.Info("Checking file")
}
err := d.parseMarkdown()
if err != nil {
return err
}
// mark document as having been handled
d.Parsed = true
return nil
}
// parseMarkdown parses the document's markdown.
func (d *Doc) parseMarkdown() error {
bytes, err := os.ReadFile(d.Name)
if err != nil {
return err
}
md := bf.New(bf.WithExtensions(bf.CommonExtensions))
root := md.Parse(bytes)
root.Walk(makeVisitor(d, d.ShowTOC))
errorCount := len(errorList)
if errorCount > 0 {
extra := ""
if errorCount != 1 {
extra = "s"
}
var msg []string
for _, err := range errorList {
msg = append(msg, err.Error())
}
return fmt.Errorf("found %d parse error%s:\n%s",
errorCount,
extra,
strings.Join(msg, "\n"))
}
return d.check()
}
// makeVisitor returns a function that is used to visit all document nodes.
//
// If createTOC is false, the visitor will check all nodes, but if true, the
// visitor will only display a table of contents for the document.
func makeVisitor(doc *Doc, createTOC bool) func(node *bf.Node, entering bool) bf.WalkStatus {
f := func(node *bf.Node, entering bool) bf.WalkStatus {
if !entering {
return bf.GoToNext
}
var err error
if createTOC {
err = doc.displayTOC(node)
} else {
err = doc.handleNode(node)
}
if err != nil {
// The visitor cannot return an error, so collect up all parser
// errors for dealing with later.
errorList = append(errorList, err)
}
return bf.GoToNext
}
return f
}
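A minimal standalone sketch of the blackfriday parse-and-walk pattern this file relies on (the markdown input is an assumed example):

```go
package main

import (
	"fmt"

	bf "gopkg.in/russross/blackfriday.v2"
)

func main() {
	src := []byte("# Title\n\nSee [the guide](docs/guide.md#setup).\n")

	md := bf.New(bf.WithExtensions(bf.CommonExtensions))
	root := md.Parse(src)

	root.Walk(func(node *bf.Node, entering bool) bf.WalkStatus {
		if !entering {
			return bf.GoToNext
		}
		switch node.Type {
		case bf.Heading:
			fmt.Println("heading, level", node.HeadingData.Level)
		case bf.Link:
			fmt.Println("link to", string(node.Destination))
		}
		return bf.GoToNext
	})
}
```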

View File

@ -0,0 +1,43 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import "fmt"
func linkHeaderRecord() []string {
return []string{
"Document",
"Address",
"Path",
"Description",
"Type",
}
}
func linkToRecord(l Link) (record []string) {
record = append(record, l.Doc.Name)
record = append(record, l.Address)
record = append(record, l.ResolvedPath)
record = append(record, l.Description)
record = append(record, l.Type.String())
return record
}
func headingHeaderRecord() []string {
return []string{
"Name",
"Link",
"Level",
}
}
func headingToRecord(h Heading) (record []string) {
record = append(record, h.Name)
record = append(record, h.LinkName)
record = append(record, fmt.Sprintf("%d", h.Level))
return record
}

View File

@ -0,0 +1,29 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
// headingByLinkName returns the heading associated with the specified link name.
func (d *Doc) headingByLinkName(linkName string) *Heading {
for _, heading := range d.Headings {
if heading.LinkName == linkName {
return &heading
}
}
return nil
}
// heading returns the heading with the name specified.
func (d *Doc) heading(name string) *Heading {
for _, heading := range d.Headings {
if name == heading.LinkName {
return &heading
}
}
return nil
}

View File

@ -0,0 +1,41 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"github.com/sirupsen/logrus"
)
func (d *Doc) showStats() {
var counters [LinkTypeCount]int
linkCount := 0
for _, linkList := range d.Links {
for _, link := range linkList {
counters[link.Type]++
linkCount++
}
}
fields := logrus.Fields{
"headings-count": len(d.Headings),
"links-count": linkCount,
}
for i, count := range counters {
name := LinkType(i).String()
fieldName := fmt.Sprintf("link-type-%s-count", name)
fields[fieldName] = count
}
d.Logger.WithFields(fields).Info("Statistics")
}

View File

@ -0,0 +1,75 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"strings"
bf "gopkg.in/russross/blackfriday.v2"
)
// displayTOC displays a table of contents entry for the specified node.
func (d *Doc) displayTOC(node *bf.Node) error {
switch node.Type {
case bf.Heading:
return d.displayTOCEntryFromNode(node)
case bf.Text:
// handle blackfriday deficiencies
headings, err := d.forceCreateHeadings(node)
if err != nil {
return err
}
for _, heading := range headings {
err := d.displayTOCEntryFromHeading(heading)
if err != nil {
return err
}
}
}
return nil
}
// displayTOCEntryFromHeading displays a table of contents entry
// for the specified heading.
func (d *Doc) displayTOCEntryFromHeading(heading Heading) error {
const indentSpaces = 4
prefix := ""
level := heading.Level
// Indent needs to be zero for top level headings
level--
if level > 0 {
prefix = strings.Repeat(" ", level*indentSpaces)
}
entry := fmt.Sprintf("[%s](%s%s)", heading.MDName, anchorPrefix, heading.LinkName)
fmt.Printf("%s%s %s\n", prefix, listPrefix, entry)
return nil
}
// displayTOCEntryFromNode displays a table of contents entry
// for the heading represented by the specified node.
func (d *Doc) displayTOCEntryFromNode(node *bf.Node) error {
if err := checkNode(node, bf.Heading); err != nil {
return err
}
heading, err := d.makeHeading(node)
if err != nil {
return err
}
return d.displayTOCEntryFromHeading(heading)
}
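Given the rule above, a level-N heading is indented by `4*(N-1)` spaces and rendered as a `* [MDName](#LinkName)` list entry. A standalone sketch with an assumed level-3 heading:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	const indentSpaces = 4

	// Assumed example heading.
	level := 3
	mdName := "Build a custom QEMU"
	linkName := "build-a-custom-qemu"

	prefix := strings.Repeat(" ", (level-1)*indentSpaces)
	fmt.Printf("%s* [%s](#%s)\n", prefix, mdName, linkName)
	// Prints: "        * [Build a custom QEMU](#build-a-custom-qemu)"
}
```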

View File

@ -0,0 +1,159 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import "github.com/sirupsen/logrus"
// LinkType represents the type of a link in a markdown document.
type LinkType int
const (
unknownLink LinkType = iota
internalLink LinkType = iota
externalLink LinkType = iota // External ".md" file
externalFile LinkType = iota // External non-".md" file
urlLink LinkType = iota
mailLink LinkType = iota
LinkTypeCount LinkType = iota
)
func (t LinkType) String() string {
var name string
switch t {
case unknownLink:
name = "unknown"
case internalLink:
name = "internal-link"
case externalLink:
name = "external-link"
case externalFile:
name = "external-file"
case urlLink:
name = "url-link"
case mailLink:
name = "mail-link"
}
return name
}
// Heading is a markdown heading, which might be the destination
// for a link.
//
// Example: A heading like this:
//
// ### This is a `verbatim` heading
//
// ... would be described as:
//
// ```go
//
// Heading{
// Name: "This is a verbatim heading",
// MDName: "This is a `verbatim` heading",
// LinkName: "this-is-a-verbatim-heading",
// Level: 3,
// }
//
// ```
type Heading struct {
// Not strictly necessary since the name is used as a hash key.
// However, storing here too makes the code simpler ;)
Name string
// Name including any markdown syntax
MDName string
// The encoded value of Name.
LinkName string
// Heading level (1 for top level)
Level int
}
// Link is a reference to another part of this document
// (or another document).
//
// Example: A link like this:
//
// [internal link](#internal-section-name)
//
// ... would be described as:
//
// ```go
//
// Link{
// Address: "internal-section-name",
// ResolvedPath: "",
// Description: "internal link",
// Type: internalLink,
// }
//
// ```
//
// And a link like this:
//
// [external link](/foo.md#section-name)
//
// ... would be described as:
//
// ```go
//
// Link{
// Address: "foo.md#section-name",
// ResolvedPath: "/docroot/foo.md",
// Description: "external link",
// Type: externalLink,
// }
//
// ```
type Link struct {
// Document this link refers to.
Doc *Doc
// Original address from document.
//
// Must be a valid Heading.LinkName.
//
// Not strictly necessary since the address is used as a hash key.
// However, storing here too makes the code simpler ;)
Address string
// The fully expanded address, without any anchor and heading suffix.
//
// Only applies to certain link types.
ResolvedPath string
// The text the user sees for the hyperlink address
Description string
Type LinkType
}
// Doc represents a markdown document.
type Doc struct {
Logger *logrus.Entry
// Key: heading name
// Value: Heading
Headings map[string]Heading
// Key: link address
// Value: *list* of links. Required since you can have multiple links with
// the same _address_, but of a different type.
Links map[string][]Link
// Filename
Name string
// true when this document has been fully parsed
Parsed bool
// if true, only show the Table Of Contents
ShowTOC bool
ListMode bool
}

View File

@ -0,0 +1,97 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"errors"
"fmt"
"os"
"strings"
"unicode"
bf "gopkg.in/russross/blackfriday.v2"
)
// fileExists returns true if the specified file exists, else false.
func fileExists(path string) bool {
if _, err := os.Stat(path); os.IsNotExist(err) {
return false
}
return true
}
// splitLink splits a link like "foo.md#section-name" into a filename
// ("foo.md") and a section name ("section-name").
func splitLink(linkName string) (fileName, sectionName string, err error) {
if linkName == "" {
return "", "", errors.New("need linkName")
}
if !strings.Contains(linkName, anchorPrefix) {
return linkName, "", nil
}
fields := strings.Split(linkName, anchorPrefix)
expectedFields := 2
foundFields := len(fields)
if foundFields != expectedFields {
return "", "", fmt.Errorf("invalid link %s: expected %d fields, found %d", linkName, expectedFields, foundFields)
}
fileName = fields[0]
sectionName = fields[1]
return fileName, sectionName, nil
}
// validHeadingIDChar is a strings.Map() function used to determine which characters
// can appear in a heading ID.
func validHeadingIDChar(r rune) rune {
if unicode.IsLetter(r) ||
unicode.IsNumber(r) ||
unicode.IsSpace(r) ||
r == '-' || r == '_' {
return r
}
// Remove all other chars from destination string
return -1
}
// createHeadingID creates an HTML anchor name for the specified heading
func createHeadingID(headingName string) (id string, err error) {
if headingName == "" {
return "", fmt.Errorf("need heading name")
}
// Munge the original heading into an id by:
//
// - removing invalid characters.
// - lower-casing.
// - replacing spaces with dashes.
id = strings.Map(validHeadingIDChar, headingName)
id = strings.ToLower(id)
id = strings.Replace(id, " ", "-", -1)
return id, nil
}
func checkNode(node *bf.Node, expectedType bf.NodeType) error {
if node == nil {
return errors.New("node cannot be nil")
}
if node.Type != expectedType {
return fmt.Errorf("expected %v node, found %v", expectedType, node.Type)
}
return nil
}
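A standalone sketch of the ID munging performed by `createHeadingID()`, applied to one of the headings exercised by the test table that follows:

```go
package main

import (
	"fmt"
	"strings"
	"unicode"
)

func main() {
	heading := "Containerd Runtime V2 API (Shim V2 API)"

	// Keep letters, numbers, spaces, '-' and '_'; drop everything else.
	id := strings.Map(func(r rune) rune {
		if unicode.IsLetter(r) || unicode.IsNumber(r) || unicode.IsSpace(r) ||
			r == '-' || r == '_' {
			return r
		}
		return -1
	}, heading)

	id = strings.ToLower(id)
	id = strings.Replace(id, " ", "-", -1)

	fmt.Println(id) // containerd-runtime-v2-api-shim-v2-api
}
```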

View File

@ -0,0 +1,149 @@
//
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
func TestSplitLink(t *testing.T) {
assert := assert.New(t)
type testData struct {
linkName string
file string
section string
valid bool
}
data := []testData{
{"", "", "", false},
{"foo.md", "foo.md", "", true},
{"#bar", "", "bar", true},
{"foo.md#bar", "foo.md", "bar", true},
{"foo.md%%bar", "foo.md%%bar", "", true},
}
for i, d := range data {
file, section, err := splitLink(d.linkName)
if d.valid {
assert.NoErrorf(err, "test[%d]: %+v", i, d)
assert.Equal(file, d.file, "test[%d]: %+v", i, d)
assert.Equal(section, d.section, "test[%d]: %+v", i, d)
} else {
assert.Errorf(err, "test[%d]: %+v", i, d)
}
}
}
func TestValidHeadingIDChar(t *testing.T) {
assert := assert.New(t)
type testData struct {
ch rune
valid bool
}
data := []testData{
{' ', true},
{'\t', true},
{'\n', true},
{'a', true},
{'z', true},
{'A', true},
{'Z', true},
{'0', true},
{'9', true},
{'-', true},
{'_', true},
{'\000', false},
{'\001', false},
}
for i, d := range data {
result := validHeadingIDChar(d.ch)
var outcome bool
if d.valid {
outcome = result != -1
} else {
outcome = result == -1
}
assert.Truef(outcome, "test[%d]: %+v", i, d)
}
// the main list of invalid chars to test
invalid := "!@#$%^&*()+=[]{}\\|:\";'<>?,./"
for i, ch := range invalid {
result := validHeadingIDChar(ch)
outcome := result == -1
assert.Truef(outcome, "invalid[%d]: %+v", i, ch)
}
}
func TestCreateHeadingID(t *testing.T) {
assert := assert.New(t)
type testData struct {
heading string
id string
expectedError bool
}
data := []testData{
{"", "", true},
{"a", "a", false},
{"a.b/c:d", "abcd", false},
{"a ?", "a-", false},
{"a !?!", "a-", false},
{"foo", "foo", false},
{"foo bar", "foo-bar", false},
{"foo_bar", "foo_bar", false},
{"foo_bar()", "foo_bar", false},
{"`foo_bar()`", "foo_bar", false},
{"foo_bar()baz", "foo_barbaz", false},
{"Stability or Performance?", "stability-or-performance", false},
{"Hello - World", "hello---world", false},
{"metrics_json_init()", "metrics_json_init", false},
{"metrics_json_add_array_element(json)", "metrics_json_add_array_elementjson", false},
{"What is it ?", "what-is-it-", false},
{"Sandbox `DeviceInfo`", "sandbox-deviceinfo", false},
{"Build a custom QEMU for aarch64/arm64 - REQUIRED", "build-a-custom-qemu-for-aarch64arm64---required", false},
{"docker --net=host", "docker---nethost", false},
{"Containerd Runtime V2 API (Shim V2 API)", "containerd-runtime-v2-api-shim-v2-api", false},
{"Containerd Runtime V2 API: Shim V2 API", "containerd-runtime-v2-api-shim-v2-api", false},
{"Launch i3.metal instance", "launch-i3metal-instance", false},
{"Deploy!", "deploy", false},
}
for i, d := range data {
id, err := createHeadingID(d.heading)
msg := fmt.Sprintf("test[%d]: %+v, id: %q\n", i, d, id)
if d.expectedError {
assert.Error(err)
continue
}
assert.Equal(id, d.id, msg)
}
}

View File

@ -0,0 +1,178 @@
# Spell check tool
## Overview
The `kata-spell-check.sh` tool is used to check a markdown file for
typographical (spelling) mistakes.
## Approach
The spell check tool is based on
[`hunspell`](https://github.com/hunspell/hunspell). It uses standard Hunspell
English dictionaries and supplements these with a custom Hunspell dictionary.
The document is cleaned of several entities before the spell-check begins.
These entities include the following:
- URLs
- Email addresses
- Code blocks
- Most punctuation
- GitHub userids
## Custom words
A custom dictionary is required to accept specific words that are either well
understood by the community or are defined in various document files, but do
not appear in standard dictionaries. The custom dictionaries allow those words
to be accepted as correct. The following lists common examples of such words:
- Abbreviations
- Acronyms
- Company names
- Product names
- Project names
- Technical terms
## Spell check a document file
```sh
$ ./kata-spell-check.sh check /path/to/file
```
> **Note:** If you have made local edits to the dictionaries, you may
> [re-create the master dictionary files](#create-the-master-dictionary-files)
> as documented in the [Adding a new word](#adding-a-new-word) section,
> in order for your local edits to take effect.
## Other options
Lists all available options and commands:
```sh
$ ./kata-spell-check.sh -h
```
## Technical details
### Hunspell dictionary format
A Hunspell dictionary comprises two text files:
- A word list file
This file defines a list of words (one per line). The list includes optional
references to one or more rules defined in the rules file as well as optional
comments. Specify fixed words (e.g. company names) verbatim. Enter “normal”
words in their root form.
The root form of a "normal" word is the simplest and shortest form of that
word. For example, the following list of words are all formed from the root
word "computer":
- Computers
- Computing
- Computed
Each word in the previous list is formed from the root word "computer" by
applying one or both of the following manipulations:
- Remove one or more characters from the end of the word.
- Add a new ending.
Therefore, you list the root word "computer" in the word list file.
- A rules file
This file defines named manipulations to apply to root words to form new
words. For example, rules that make a root word plural.
### Source files
The rules file and the word list file for the custom dictionary are generated
from "source" fragment files in the [`data`](data/) directory.
All the fragment files allow comments using the hash (`#`) comment
symbol and all files contain a comment header explaining their content.
#### Word list file fragments
The `*.txt` files are word list file fragments. Splitting the word list
into fragments makes updates easier and clearer as each fragment is a
grouping of related terms. The name of the file gives a clue as to the
contents but the comments at the top of each file provide further
detail.
Every line that does not start with a comment symbol contains a single
word. An optional comment for a word may appear after the word and is
separated from the word by whitespace followed by the comment symbol:
```
word # This is a comment explaining this particular word list entry.
```
You *may* suffix each word by a forward slash followed by one or more
upper-case letters. Each letter refers to a rule name in the rules file:
```
word/AC # This word references the 'A' and 'C' rules.
```
#### Rules file
The [rules file](data/rules.aff) contains a set of general rules that can be
applied to one or more root words in the word list files. You can make
comments in the rules file.
For an explanation of the format of this file see
[`man 5 hunspell`](http://www.manpagez.com/man/5/hunspell)
([source](https://github.com/hunspell/hunspell/blob/master/man/hunspell.5)).
## Adding a new word
### Update the word list fragment
If you want to allow a new word to the dictionary,
- Check to ensure you do need to add the word
Is the word valid and correct? If the word is a project, product,
or company name, is the capitalization correct?
- Add the new word to the appropriate [word list fragment file](data).
Specifically, if it is a general word, add the *root* of the word to
the appropriate fragment file.
- To add rule references, append a `/` suffix followed by the letters of each
rule to apply.
### Optionally update the rules file
It should not generally be necessary to update the rules file since it
already contains rules for most scenarios. However, if you need to
update the file, [read the documentation carefully](#rules-file).
### Create the master dictionary files
Every time you change the dictionary files you must recreate the master
dictionary files:
```sh
$ ./kata-spell-check.sh make-dict
```
As a convenience, [checking a file](#spell-check-a-document-file) will
automatically create the master dictionary files.
### Test the changes
You must test any changes to the [word list file
fragments](#word-list-file-fragments) or the [rules file](#rules-file)
by doing the following:
1. Recreate the [master dictionary files](#create-the-master-dictionary-files).
1. [Run the spell checker](#spell-check-a-document-file) on a file containing the
words you have added to the dictionary.

View File

@ -0,0 +1,123 @@
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Description: List of acronyms and abbreviations.
ACPI/AB
ACS/AB
API/AB
AUFS # Another Union FS
AWS/AB
BDF/AB
CFS/AB
CLI/AB
CNI/AB
CNM/AB
CPUID/AB
CRI/AB
CVE/AB
DAX/AB
DinD/B # Docker in Docker
dind/B
DMA/AB
DPDK/AB
FaaS/B # Function as a Service
FS/AB
fs/B # For terms like "virtio-fs"
GCE/AB
GOPATH/AB
GPG/AB
GPU/AB
gRPC/AB
GSC/AB
GVT/AB
IaaS/B # Infrastructure as a Service
IOMMU/AB
IoT/AB # Internet of Things
IOV/AB
JSON/AB
k8s/B
KCSA/AB
KSM/AB
KVM/AB
LTS/AB
MACVTAP/AB
mem/B # For terms like "virtio-mem"
memdisk/B
MDEV/AB
NEMU/AB
NIC/AB
NVDIMM/AB
OCI/AB
OVMF/AB
OverlayFS/B
PaaS/B # Platform as a Service
PCDIMM/AB
PCI/AB
PCIe/AB
PID/AB
pmem/B # persistent memory
PNG/AB
POD/AB
PR/AB
PSS/AB
QA/AB
QAT/AB
QEMU/AB
RBAC/AB
RDMA/AB
RNG/AB
SaaS/B # Software as a Service
SCSI/AB
SDK/AB
seccomp # secure computing mode
SHA/AB
SPDX/AB
SRIOV/AB
SVG/AB
TBD/AB
TOC/AB
TOML/AB
TTY/AB
UI/AB
UTS/AB
UUID/AB
vCPU/AB
VETH/AB
VF/AB
VFIO/AB
VGPU/AB
vhost/AB
VHOST/AB
virtio/AB
VirtIO/AB
Virtio-fs/AB
Virtio-mem/AB
VLAN/AB
VM/AB
VMCache/AB
vmm
VMM/AB
VMX/AB
VPP/AB
VSOCK/AB
VSS/AB
WIP/AB # Work In Progress
WRT/AB # With Respect To
XIP/AB
YAML/AB
irq/AB
mmio/AB
APIC
msg/AB
UDS
dbs # Dragonball Sandbox
TDX
tdx
mptable
fdt
gic
msr
cpuid
pio

@ -0,0 +1,21 @@
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Description: List of architectures.
# Architectures
aarch64/B
amd64/B
arm64/B
ppc64el/B
ppc64le/B
s390x/B
x86_64/B
x86/B
# Micro architecture names
Haswell/B
Ivybridge/B

@ -0,0 +1,18 @@
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Description: List of Linux Distributions.
CentOS/B
Debian/B
EulerOS/B
Fedora/B
macOS/B
MacOS/B
minikube/B
openSUSE/B
OpenSUSE/B
RHEL/B
SLES/B
Ubuntu/B

@ -0,0 +1,25 @@
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Description: Names of commands, files and packages.
#
# Notes: These *should* strictly be placed in backticks but alas this
# doesn't always happen.
#
# References: https://github.com/kata-containers/kata-containers/blob/main/docs/Documentation-Requirements.md#files-and-command-names
cgroup/AB
coredump/A
cpuset/AB
Dockerfile/AB
init/AB
initramfs/AB
initrd/AB
netns/AB
rootfs/AB
stderr/AB
stdin/AB
stdout/AB
syslog/AB
Vagrantfile/B

@ -0,0 +1,13 @@
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Description: List of words that are missing from Hunspell dictionaries
# on some platforms.
committer/AB # Not available on Ubuntu 16.04 or CentOS 7
plugin/AB # Not available on Ubuntu 16.04
regexp/AB # Not available on Ubuntu 16.04
screenshot/AB # Not available on Ubuntu 16.04 or CentOS 7
tarball/AB # Not available on Ubuntu 16.04
uninstall # Not available on Ubuntu 16.04

@ -0,0 +1,135 @@
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Description: General word list.
ack/A
arg # Argument
auditability
backend
backport/ACD
backtick/AB
backtrace
bootloader/AB
centric/B
checkbox/A
chipset/AB
codebase
commandline
config/AB
crypto # Cryptography
cryptoprocessor/AB
DaemonSet/AB
deliverable/AB
dev
devicemapper/B
deploy
dialer
dialog/A
Diffie/B # Diffie-Hellman (cryptography)
distro/AB
emptydir/A
enablement/AB
entrypoint/AB
ethernet
filename/AB
filesystem/AB
freeform
goroutine/AB
hostname/AB
hotplug/ACD
howto/AB
HugePage/AB
hugepage/AB
Hyp
hypercall/A
hypervisor/AB
implementer/A
implementor/A
Infiniband
iodepth/A
ioengine/A
iptables
Itanium/AB
kata
Kat/AB # "Kat Herding Team" :)
keypair/A
lifecycle/A
linter/AB
logfile/A
Longterm
longterm
loopback
memcpy/A
mergeable
metadata
microcontroller/AB
miniOS
mmap/AB
nack/AB
namespace/ABCD
netlink
NVIDIA/A
nvidia/A
onwards
OpenAPI
OS/AB
parallelize/AC
passthrough
patchset/A
pluggable/AB
portmapper/AB
portmapping/A
pre
prefetch/ACD
prestart
programmatically
proxying
Quadro
ramdisk/A
readonly
rebase/ACD
refactor/ACD
remediate
repo/A
runtime/AB
scalability
serverless
signoff/A
stalebot/B
startup
subdirectory/A
swappiness
sysctl/AB
teardown
templating
timestamp/AB
tracability
ttRPC/B
udev/B
uevent/AB
unbootable
uncomment/ACD
unported
unskip/AC
untrusted
untrusting
userid/AB
userspace/B
vendored
vendoring
versioning
vGPU
virtualization
virtualized
webhook/AB
whitespace
workflow/A
Xeon/A
yaml
upcall
Upcall
ioctl/A
struct/A # struct in Rust
Struct/A

@ -0,0 +1,101 @@
# Copyright (c) 2019-2023 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Description: Names of projects, companies and services.
Ansible/B
AppArmor/B
blogbench/B
BusyBox/B
Cassandra/B
ccloudvm/B
codecov/B
containerd/B
cnn/B
cri-o/B
CRI-O/B
DevStack/B
Django/B
Docker/B
dracut/B
Dragonball/B
Facebook/B
fio/B
Fluentd/B
Frakti/B
Git/B
GitHub/B
GoDoc/B
golang/B
Golang/B
Grafana/B
Gramine/B
Huawei/B
Inclavare/B
iPerf/B
IPerf/B
Istio/B
Jaeger/B
Jenkins/B
Jupyter/B
journald/B
jq/B
Kata/B
Kibana/B
Kubelet/B
Kubernetes/B
Launchpad/B
LevelDB/B
libcontainer/B
libelf/B
libvirt/B
Linkerd/B
LinuxONE/B
Logrus/B
Logstash/B
Mellanox/B
Minikube/B
MITRE/B
musl/B
Netlify/B
Nginx/B
OpenCensus/B
OpenPGP/B
OpenShift/B
OpenSSL/B
OpenStack/B
OpenTelemetry/B
OpenTracing/B
osbuilder/B
packagecloud/B
Pandoc/B
Podman/B
PullApprove/B
Pytorch/B
QuickAssist/B
R/B
raytracer/B
rkt/B
runc/B
runV/B
rustlang/B
Rustlang/B
SELinux/B
SemaphoreCI/B
snapcraft/B
snapd/B
SQLite/B
SUSE/B
Sysbench/B
systemd/B
tf/B
TravisCI/B
Tokio/B
Vexxhost/B
virtcontainers/B
VMWare/B
vSphere/B
Yamux/B
yq/B
Zun/B

@ -0,0 +1,36 @@
#
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
SET UTF-8
# Add the following characters so they are accepted as part of a word
WORDCHARS 0123456789'
# Disable hyphenation
BREAK 0
# plural
SFX A N 3
SFX A 0 s [^x]
SFX A 0 es x
SFX A y ies
# possession
SFX B N 1
SFX B 0 's
# past tense
SFX C N 4
SFX C 0 d e
SFX C 0 ed [rt]
SFX C 0 ped p
SFX C 0 ged g
# present continuous
SFX D N 3
SFX D 0 ging g
SFX D 0 ing [rt]
SFX D e ing e

@ -0,0 +1,36 @@
#
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
SET UTF-8
# Add the following characters so they are accepted as part of a word
WORDCHARS 0123456789'
# Disable hyphenation
BREAK 0
# plural
SFX A N 3
SFX A 0 s [^x]
SFX A 0 es x
SFX A y ies
# possession
SFX B N 1
SFX B 0 's
# past tense
SFX C N 4
SFX C 0 d e
SFX C 0 ed [rt]
SFX C 0 ped p
SFX C 0 ged g
# present continuous
SFX D N 3
SFX D 0 ging g
SFX D 0 ing [rt]
SFX D e ing e

@ -0,0 +1,384 @@
383
ACPI/AB
ACS/AB
API/AB
APIC
AUFS
AWS/AB
Ansible/B
AppArmor/B
BDF/AB
BusyBox/B
CFS/AB
CLI/AB
CNI/AB
CNM/AB
CPUID/AB
CRI-O/B
CRI/AB
CVE/AB
Cassandra/B
CentOS/B
DAX/AB
DMA/AB
DPDK/AB
DaemonSet/AB
Debian/B
DevStack/B
Diffie/B
DinD/B
Django/B
Docker/B
Dockerfile/AB
Dragonball/B
EulerOS/B
FS/AB
FaaS/B
Facebook/B
Fedora/B
Fluentd/B
Frakti/B
GCE/AB
GOPATH/AB
GPG/AB
GPU/AB
GSC/AB
GVT/AB
Git/B
GitHub/B
GoDoc/B
Golang/B
Grafana/B
Gramine/B
Haswell/B
Huawei/B
HugePage/AB
Hyp
IOMMU/AB
IOV/AB
IPerf/B
IaaS/B
Inclavare/B
Infiniband
IoT/AB
Istio/B
Itanium/AB
Ivybridge/B
JSON/AB
Jaeger/B
Jenkins/B
Jupyter/B
KCSA/AB
KSM/AB
KVM/AB
Kat/AB
Kata/B
Kibana/B
Kubelet/B
Kubernetes/B
LTS/AB
Launchpad/B
LevelDB/B
Linkerd/B
LinuxONE/B
Logrus/B
Logstash/B
Longterm
MACVTAP/AB
MDEV/AB
MITRE/B
MacOS/B
Mellanox/B
Minikube/B
NEMU/AB
NIC/AB
NVDIMM/AB
NVIDIA/A
Netlify/B
Nginx/B
OCI/AB
OS/AB
OVMF/AB
OpenAPI
OpenCensus/B
OpenPGP/B
OpenSSL/B
OpenSUSE/B
OpenShift/B
OpenStack/B
OpenTelemetry/B
OpenTracing/B
OverlayFS/B
PCDIMM/AB
PCI/AB
PCIe/AB
PID/AB
PNG/AB
POD/AB
PR/AB
PSS/AB
PaaS/B
Pandoc/B
Podman/B
PullApprove/B
Pytorch/B
QA/AB
QAT/AB
QEMU/AB
Quadro
QuickAssist/B
R/B
RBAC/AB
RDMA/AB
RHEL/B
RNG/AB
Rustlang/B
SCSI/AB
SDK/AB
SELinux/B
SHA/AB
SLES/B
SPDX/AB
SQLite/B
SRIOV/AB
SUSE/B
SVG/AB
SaaS/B
SemaphoreCI/B
Struct/A
Sysbench/B
TBD/AB
TDX
TOC/AB
TOML/AB
TTY/AB
Tokio/B
TravisCI/B
UDS
UI/AB
UTS/AB
UUID/AB
Ubuntu/B
Upcall
VETH/AB
VF/AB
VFIO/AB
VGPU/AB
VHOST/AB
VLAN/AB
VM/AB
VMCache/AB
VMM/AB
VMWare/B
VMX/AB
VPP/AB
VSOCK/AB
VSS/AB
Vagrantfile/B
Vexxhost/B
VirtIO/AB
Virtio-fs/AB
Virtio-mem/AB
WIP/AB
WRT/AB
XIP/AB
Xeon/A
YAML/AB
Yamux/B
Zun/B
aarch64/B
ack/A
amd64/B
arg
arm64/B
auditability
backend
backport/ACD
backtick/AB
backtrace
blogbench/B
bootloader/AB
ccloudvm/B
centric/B
cgroup/AB
checkbox/A
chipset/AB
cnn/B
codebase
codecov/B
commandline
committer/AB
config/AB
containerd/B
coredump/A
cpuid
cpuset/AB
cri-o/B
crypto
cryptoprocessor/AB
dbs
deliverable/AB
deploy
dev
devicemapper/B
dialer
dialog/A
dind/B
distro/AB
dracut/B
emptydir/A
enablement/AB
entrypoint/AB
ethernet
fdt
filename/AB
filesystem/AB
fio/B
freeform
fs/B
gRPC/AB
gic
golang/B
goroutine/AB
hostname/AB
hotplug/ACD
howto/AB
hugepage/AB
hypercall/A
hypervisor/AB
iPerf/B
implementer/A
implementor/A
init/AB
initramfs/AB
initrd/AB
ioctl/A
iodepth/A
ioengine/A
iptables
irq/AB
journald/B
jq/B
k8s/B
kata
keypair/A
libcontainer/B
libelf/B
libvirt/B
lifecycle/A
linter/AB
logfile/A
longterm
loopback
macOS/B
mem/B
memcpy/A
memdisk/B
mergeable
metadata
microcontroller/AB
miniOS
minikube/B
mmap/AB
mmio/AB
mptable
msg/AB
msr
musl/B
nack/AB
namespace/ABCD
netlink
netns/AB
nvidia/A
onwards
openSUSE/B
osbuilder/B
packagecloud/B
parallelize/AC
passthrough
patchset/A
pio
pluggable/AB
plugin/AB
pmem/B
portmapper/AB
portmapping/A
ppc64el/B
ppc64le/B
pre
prefetch/ACD
prestart
programmatically
proxying
ramdisk/A
raytracer/B
readonly
rebase/ACD
refactor/ACD
regexp/AB
remediate
repo/A
rkt/B
rootfs/AB
runV/B
runc/B
runtime/AB
rustlang/B
s390x/B
scalability
screenshot/AB
seccomp
serverless
signoff/A
snapcraft/B
snapd/B
stalebot/B
startup
stderr/AB
stdin/AB
stdout/AB
struct/A
subdirectory/A
swappiness
sysctl/AB
syslog/AB
systemd/B
tarball/AB
tdx
teardown
templating
tf/B
timestamp/AB
tracability
ttRPC/B
udev/B
uevent/AB
unbootable
uncomment/ACD
uninstall
unported
unskip/AC
untrusted
untrusting
upcall
userid/AB
userspace/B
vCPU/AB
vGPU
vSphere/B
vendored
vendoring
versioning
vhost/AB
virtcontainers/B
virtio/AB
virtualization
virtualized
vmm
webhook/AB
whitespace
workflow/A
x86/B
x86_64/B
yaml
yq/B

@ -0,0 +1,336 @@
#!/bin/bash
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Description: spell-check utility.
[ -n "$DEBUG" ] && set -x
set -o errexit
set -o pipefail
set -o nounset
# Ensure we spell check in English
LANG=C
LC_ALL=C
script_name=${0##*/}
if [ "$(uname -s)" == "Darwin" ]
then
# Hunspell dictionaries are not easily available
# on this platform it seems.
echo "INFO: $script_name: OSX not supported - exiting"
exit 0
fi
self_dir=$(dirname "$(readlink -f "$0")")
cidir="${self_dir}/../../../tests"
source "${cidir}/common.bash"
# Directory containing word lists.
#
# Each file in this directory must:
#
# - Have the ".txt" extension.
# - Contain one word per line.
#
# Additionally, the files may contain blank lines and comments
# (lines beginning with '#').
KATA_DICT_FRAGMENT_DIR=${KATA_DICT_FRAGMENT_DIR:-data}
KATA_DICT_NAME="${KATA_DICT_NAME:-kata-dictionary}"
# Name of dictionary file suitable for using with hunspell(1)
# as a personal dictionary.
KATA_DICT_FILE="${KATA_DICT_FILE:-${KATA_DICT_NAME}.dic}"
KATA_RULES_FILE="${KATA_RULES_FILE:-${KATA_DICT_FILE/.dic/.aff}}"
# command to remove code from markdown (inline and blocks)
strip_cmd="${cidir}/kata-doc-to-script.sh"
fragment_dir="${self_dir}/${KATA_DICT_FRAGMENT_DIR}"
# Name of file containing dictionary rules that apply to the
# KATA_DICT_FILE word list.
rules_file_name="rules.aff"
# Command to spell check a file
spell_check_cmd="${KATA_SPELL_CHECK_CMD:-hunspell}"
# Command to convert a markdown file into plain text
md_convert_tool="${KATA_MARKDOWN_CONVERT_TOOL:-pandoc}"
KATA_DICT_DIR="${KATA_DICT_DIR:-${self_dir}}"
dict_file="${KATA_DICT_DIR}/${KATA_DICT_FILE}"
rules_file="${KATA_DICT_DIR}/${KATA_RULES_FILE}"
# Hunspell refers to a custom dictionary by its path followed by the name of
# the dictionary (without the file extension).
kata_dict_ref="${KATA_DICT_DIR}/${KATA_DICT_NAME}"
# All project documentation must be written in English,
# with American English taking priority.
#
# We also use a custom dictionary which has to be specified by its
# "directory and name prefix" and which must also be the first specified
# dictionary.
dict_languages="${kata_dict_ref},en_US,en_GB"
make_dictionary()
{
[ -d "$fragment_dir" ] || die "invalid fragment directory"
[ -z "$dict_file" ] && die "missing dictionary output file name"
# Note: the first field is extracted to allow for inline
# comments in each fragment. For example:
#
# word # this text describes why the word is in the dictionary.
#
local dict
dict=$(cat "$fragment_dir"/*.txt |\
grep -v '^\#' |\
grep -v '^$' |\
awk '{print $1}' |\
sort -u || true)
[ -z "$dict" ] && die "generated dictionary is empty"
# Now, add in the number of words as a header (required by Hunspell)
local count
count=$(echo "$dict"| wc -l | awk '{print $1}' || true)
[ -z "$count" ] && die "cannot determine dictionary length"
[ "$count" -eq 0 ] && die "invalid dictionary length"
# Construct the dictionary
(echo "$count"; echo "$dict") > "$dict_file"
cp "${fragment_dir}/${rules_file_name}" "${rules_file}"
}
spell_check_file()
{
local file="$1"
[ -z "$file" ] && die "need file to check"
[ -e "$file" ] || die "file does not exist: '$file'"
[ -e "$dict_file" ] || make_dictionary
info "Spell checking file '$file'"
# Determine the pandoc input format.
local pandoc_input_fmts
local pandoc_input_fmt
pandoc_input_fmts=$(pandoc --list-input-formats 2>/dev/null || true)
if [ -z "$pandoc_input_fmts" ]
then
# We're using a very old version of pandoc that doesn't
# support listing its available input formats, so
# specify a default.
pandoc_input_fmt="markdown_github"
else
# Pandoc has multiple names for the gfm parser so find one of them
pandoc_input_fmt=$(echo "$pandoc_input_fmts" |\
grep -E "gfm|github" |\
head -1 || true)
fi
[ -z "$pandoc_input_fmt" ] && die "cannot find usable pandoc input format"
local stripped_doc
local pandoc_doc
local utf8_free_doc
local pre_hunspell_doc
local hunspell_results
local final_results
# First strip out all code blocks and convert all
# "quoted apostrophe's" ('\'') back into a single apostrophe.
stripped_doc=$("$strip_cmd" -i "$file" -)
# Next, convert the remainder into plain text to remove the
# remaining markdown syntax.
#
# Before pandoc gets hold of it:
#
# - Replace pipes with spaces. This
# fixes an issue with old versions of pandoc (Ubuntu 16.04)
# which completely mangle tables into nonsense.
#
# - Remove empty reference links.
#
# For example, this markdown
#
# blah [`qemu-lite`][qemu-lite] blah.
# :
# [qemu-lite]: https://...
#
# Gets converted into
#
# blah [][qemu-lite] blah.
# :
# [qemu-lite]: https://...
#
# And the empty set of square brackets confuses pandoc.
#
# After pandoc has processed the data, remove any remaining
# "inline links" in this format:
#
# [link name](#link-address)
#
# This is strictly only required for old versions of pandoc.
pandoc_doc=$(echo "$stripped_doc" |\
tr '|' ' ' |\
sed 's/\[\]\[[^]]*\]//g' |\
"$md_convert_tool" -f "${pandoc_input_fmt}" -t plain - |\
sed 's/\[[^]]*\]([^\)]*)//g' || true)
# Convert the file into "pure ASCII" by removing all awkward
# Unicode characters that won't spell check.
#
# Necessary since pandoc is "clever" and will convert things like
# GitHub's colon emojis (such as ":smile:") into the actual utf8
# character where possible.
utf8_free_doc=$(echo "$pandoc_doc" | iconv -c -f utf-8 -t ascii)
# Next, perform the following simplifications:
#
# - Remove URLs.
# - Remove email addresses.
# - Replace most punctuation symbols with a space
# (excluding a dash (aka hyphen!)).
# - Carefully remove non-hyphen dashes.
# - Remove GitHub @userids.
pre_hunspell_doc=$(echo "$utf8_free_doc" |\
sed 's,https*://[^[:space:]()][^[:space:]()]*,,g' |\
sed -r 's/[a-zA-Z0-9.-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9.-]+//g' |\
tr '[,\[\]()\*\\/\|=]' ' ' |\
sed -e 's/^ *-//g' -e 's/- $//g' -e 's/ -//g' |\
sed 's/@[a-zA-Z0-9][a-zA-Z0-9]*\b//g')
# Call the spell checker
hunspell_results=$(echo "$pre_hunspell_doc" | $spell_check_cmd -d "${dict_languages}")
# Finally, post-process the hunspell output:
#
# - Parse the output to ignore:
# - Hunspell banner.
# - Correctly spelt words (lines starting with '*', '+' or '-').
# - All words containing numbers (like "100MB").
# - All words that appear to be acronyms / abbreviations
# (at least two upper-case letters and which may be plural or
# possessive).
# - All words that appear to be numbers.
# - All possessives and the dreaded isolated "'s" which occurs
# for input like this:
#
# `kata-shim`'s
#
# which gets converted by $strip_cmd into simply:
#
# 's
#
# - Sort output.
final_results=$(echo "$hunspell_results" |\
grep -Evi "(ispell|hunspell)" |\
grep -Ev '^(\*|\+|-)' |\
grep -Evi "^(&|#) [^ ]*[0-9][^ ]*" |\
grep -Ev "^. [A-Z][A-Z][A-Z]*(s|'s)*" |\
grep -Ev "^. 's" |\
sort -u || true)
local line
local incorrects
local near_misses
near_misses=$(echo "$final_results" | grep '^&' || true)
incorrects=$(echo "$final_results" | grep '^\#' | awk '{print $2}' || true)
local -i failed=0
[ -n "$near_misses" ] && failed+=1
[ -n "$incorrects" ] && failed+=1
echo "$near_misses" | while read -r line
do
[ "$line" = "" ] && continue
local word
local possibles
word=$(echo "$line" | awk '{print $2}')
possibles=$(echo "$line" | cut -d: -f2- | sed 's/^ *//g')
warn "Word '${word}': did you mean one of the following?: ${possibles}"
done
local incorrect
for incorrect in $incorrects
do
warn "Incorrect word: '$incorrect'"
done
[ "$failed" -gt 0 ] && die "Spell check failed for file: '$file'"
info "Spell check successful for file: '$file'"
}
delete_dictionary()
{
rm -f "${KATA_DICT_FILE}" "${KATA_RULES_FILE}"
}
setup()
{
local cmd
for cmd in "$spell_check_cmd" "$md_convert_tool"
do
command -v "$cmd" &>/dev/null || die "Need $cmd command"
done
}
usage()
{
cat <<-EOF
Usage: ${script_name} <command> [arguments]
Description: Spell-checking utility.
Commands:
check <file> : Spell check the specified file
(implies 'make-dict').
delete-dict : Delete the dictionary.
help : Show this usage.
make-dict : Create the dictionary.
EOF
}
main()
{
setup
[ -z "${1:-}" ] && usage && echo && die "need command"
case "$1" in
check) shift && spell_check_file "$1" ;;
delete-dict) delete_dictionary ;;
help|-h|--help) usage && exit 0 ;;
make-dict) make_dictionary ;;
*) die "invalid command: '$1'" ;;
esac
}
main "$@"

@ -0,0 +1,32 @@
#
# Copyright (c) 2017-2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
TARGET = kata-github-labels
SOURCES = $(shell find . -type f 2>&1 | grep -E '.*\.go$$')
VERSION := ${shell cat ./VERSION}
COMMIT_NO := $(shell git rev-parse HEAD 2> /dev/null || true)
COMMIT := $(if $(shell git status --porcelain --untracked-files=no),"${COMMIT_NO}-dirty","${COMMIT_NO}")
BINDIR := $(GOPATH)/bin
DESTTARGET := $(abspath $(BINDIR)/$(TARGET))
default: install
check: $(SOURCES)
go test -v ./...
$(TARGET): $(SOURCES)
go build -o "$(TARGET)" -ldflags "-X main.name=${TARGET} -X main.commit=${COMMIT} -X main.version=${VERSION}" .
install: $(TARGET)
install -d $(shell dirname $(DESTTARGET))
install $(TARGET) $(DESTTARGET)
clean:
rm -f $(TARGET)
.PHONY: install clean

@ -0,0 +1,71 @@
# Overview
The Kata Project uses a number of GitHub repositories. To allow issues and PRs
to be handled consistently between repositories, a standard set of issue labels
is used. These labels are stored in YAML format in the master
[labels database template](labels.yaml.in). This file is human-readable,
machine-readable, and self-describing (see the file for the introductory
description).
Each repository can contain a set of additional (repository-specific) labels,
which are stored in a top-level YAML template file called `labels.yaml.in`.
Expanding the templates and merging the two databases produces the full set
of labels a repository uses.
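As a minimal sketch (the label shown is hypothetical and the master template
defines the exact schema), a repository-specific `labels.yaml.in` might look
like this:
```yaml
# Hypothetical repository-specific labels database template.
---
repo: REPO_SLUG
labels:
  - name: area/example
    description: Example repository-specific label
    category: area
    color: DEFAULT_COLOUR
```
The `REPO_SLUG` and `DEFAULT_COLOUR` placeholders are expanded by
`github-labels.sh` when the combined database is generated.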
# Generating the combined labels database
You can run the `github-labels.sh` script with the `generate` argument to
create the combined labels database. The additional arguments specify the
repository to generate the combined labels database for and the name of
a file to write the combined database to:
```sh
$ ./github-labels.sh generate github.com/kata-containers/kata-containers /tmp/combined.yaml
```
This script validates the combined labels database by performing a number of
checks, including running the `kata-github-labels` tool in checking mode. See
the
[Checking and summarising the labels database](#checking-and-summarising-the-labels-database)
section for more information.
# Checking and summarising the labels database
The `kata-github-labels` tool checks and summarizes the labels database for
each repository.
## Show labels
Displays a summary of the labels:
```sh
$ kata-github-labels show labels labels.yaml
```
## Show categories
Shows all information about categories:
```sh
$ kata-github-labels show categories --with-labels labels.yaml
```
## Check only
Performs checks on a specified labels database:
```sh
$ kata-github-labels check labels.yaml
```
## Full details
Lists all available options:
```sh
$ kata-github-labels -h
```
# Archive of old GitHub labels
See the [archive documentation](archive).

@ -0,0 +1 @@
0.0.1

@ -0,0 +1,50 @@
# GitHub labels archive
## Overview
This directory contains one YAML file per repository containing the original
set of GitHub labels before the
[new ones were applied on 2019-06-04](../labels.yaml.in).
## How the YAML files were created
This section explains how the YAML files were created.
The [`labeler`](https://github.com/tonglil/labeler) tool was used to read
the labels and write them to a YAML file.
### Install and patch the `labeler` tool
Our [labels database](../labels.yaml.in) mandates descriptions for every
label. However, at the time of writing, the `labeler` tool does not support
descriptions. This isn't ideal, but
[there is a PR](https://github.com/tonglil/labeler/pull/37)
that adds description support.
To enable description support:
```sh
$ go get -u github.com/tonglil/labeler
$ cd $GOPATH/src/github.com/tonglil/labeler
$ pr=37
$ pr_branch="PR${pr}"
$ git fetch origin "refs/pull/${pr}/head:${pr_branch}"
$ git checkout "${pr_branch}"
$ go install -v ./...
```
### Save GitHub labels for a repository
Run the following for each repository:
```sh
$ labeler scan -r ${github_repo_slug} ${output_file}
```
For example, to save the labels for the `tests` repository:
```sh
$ labeler scan -r kata-containers/tests tests.yaml
```

@ -0,0 +1,58 @@
# Scanned and autogenerated by https://github.com/tonglil/labeler
---
repo: kata-containers/ci
labels:
- name: P1
color: b60205
description: Highest priority issue (Critical)
- name: P2
color: d93f0b
description: Urgent issue
- name: P3
color: fbca04
description: Important issue
- name: P4
color: fef2c0
description: Noteworthy issue
- name: backlog
color: ededed
- name: bitesize
color: d4c5f9
description: small/easy task
- name: bug
color: d73a4a
description: Something isn't working
- name: do-not-merge
color: b60205
- name: duplicate
color: cfd3d7
description: This issue or pull request already exists
- name: enhancement
color: a2eeef
description: New feature or request
- name: good first issue
color: 7057ff
description: Good for newcomers
- name: help wanted
color: "008672"
description: Extra attention is needed
- name: in progress
color: ededed
- name: invalid
color: e4e669
description: This doesn't seem right
- name: next
color: ededed
- name: question
color: d876e3
description: Further information is requested
- name: review
color: ededed
- name: security
color: fbca04
- name: wip
color: b60205
description: Work In Progress
- name: wontfix
color: ffffff
description: This will not be worked on

@ -0,0 +1,27 @@
# Scanned and autogenerated by https://github.com/tonglil/labeler
---
repo: kata-containers/community
labels:
- name: WIP
color: b60205
- name: bitesize
color: d4c5f9
description: small/easy task
- name: bug
color: ee0701
- name: do-not-merge
color: b60205
- name: duplicate
color: cccccc
- name: enhancement
color: 84b6eb
- name: good first issue
color: 7057ff
- name: help wanted
color: 33aa3f
- name: invalid
color: e6e6e6
- name: question
color: cc317c
- name: wontfix
color: ffffff

@ -0,0 +1,44 @@
# Scanned and autogenerated by https://github.com/tonglil/labeler
---
repo: kata-containers/kata-containers
labels:
- name: P1
color: b60205
description: Highest priority issue (Critical)
- name: P2
color: d93f0b
description: Urgent issue
- name: P3
color: fbca04
description: Important issue
- name: P4
color: fef2c0
description: Noteworthy issue
- name: bitesize
color: d4c5f9
description: small/easy task
- name: bug
color: ee0701
- name: devices
color: 006b75
description: direct device support
- name: duplicate
color: cccccc
- name: enhancement
color: 84b6eb
- name: feature
color: ef70a3
- name: good first issue
color: 7057ff
- name: help wanted
color: 33aa3f
- name: invalid
color: e6e6e6
- name: limitation
color: c2e0c6
- name: question
color: cc317c
- name: security
color: fbca04
- name: wontfix
color: ffffff

@ -0,0 +1,60 @@
# Scanned and autogenerated by https://github.com/tonglil/labeler
---
repo: kata-containers/tests
labels:
- name: CI
color: 0052cc
description: Continuous Integration
- name: P1
color: b60205
description: Highest priority issue (Critical)
- name: P2
color: d93f0b
description: Urgent issue
- name: P3
color: fbca04
description: Important issue
- name: P4
color: fef2c0
description: Noteworthy issue
- name: backlog
color: ededed
- name: bitesize
color: d4c5f9
description: small/easy task
- name: bug
color: ee0701
- name: do-not-merge
color: b60205
- name: duplicate
color: cccccc
- name: enhancement
color: 84b6eb
- name: good first issue
color: 7057ff
- name: hackathon
color: 35bfa1
description: PR/Issues in hackathon events
- name: help wanted
color: 33aa3f
- name: in progress
color: ededed
- name: invalid
color: e6e6e6
- name: limitation
color: c2e0c6
- name: next
color: ededed
- name: question
color: cc317c
- name: review
color: ededed
- name: security
color: fbca04
- name: stable-candidate
color: bfdadc
description: Candidate to backport to stable branches
- name: wip
color: b60205
- name: wontfix
color: ffffff

@ -0,0 +1,216 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"errors"
"fmt"
"strings"
"unicode"
)
func containsWhitespace(s string) bool {
for _, ch := range s {
if unicode.IsSpace(ch) {
return true
}
}
return false
}
func isLower(s string) bool {
for _, ch := range s {
if !unicode.IsLetter(ch) {
continue
}
if !unicode.IsLower(ch) {
return false
}
}
return true
}
func checkCategory(c Category) error {
if c.Name == "" {
return fmt.Errorf("category name cannot be blank: %+v", c)
}
if containsWhitespace(c.Name) {
return fmt.Errorf("category name cannot contain whitespace: %+v", c)
}
if !isLower(c.Name) {
return fmt.Errorf("category name must be all lower case: %+v", c)
}
if c.Description == "" {
return fmt.Errorf("category description cannot be blank: %+v", c)
}
first := c.Description[0]
if !unicode.IsUpper(rune(first)) {
return fmt.Errorf("category description needs initial capital letter: %+v", c)
}
if !strings.HasSuffix(c.Description, ".") {
return fmt.Errorf("category description needs trailing period: %+v", c)
}
return nil
}
func checkLabel(l Label) error {
if l.Name == "" {
return fmt.Errorf("label name cannot be blank: %+v", l)
}
if !isLower(l.Name) {
return fmt.Errorf("label name must be all lower case: %+v", l)
}
if containsWhitespace(l.Name) {
return fmt.Errorf("label name cannot contain whitespace: %+v", l)
}
if l.Description == "" {
return fmt.Errorf("label description cannot be blank: %+v", l)
}
first := l.Description[0]
if !unicode.IsUpper(rune(first)) {
return fmt.Errorf("label description needs initial capital letter: %+v", l)
}
if l.CategoryName == "" {
return fmt.Errorf("label category name cannot be blank: %+v", l)
}
if l.Colour == "" {
return fmt.Errorf("label colour cannot be blank: %+v", l)
}
return nil
}
func checkLabelsAndCategories(lf *LabelsFile) error {
catCount := 0
var catNameMap map[string]int
var catDescMap map[string]int
var labelNameMap map[string]int
var labelDescMap map[string]int
catNameMap = make(map[string]int)
catDescMap = make(map[string]int)
labelNameMap = make(map[string]int)
labelDescMap = make(map[string]int)
for _, c := range lf.Categories {
if err := checkCategory(c); err != nil {
return err
}
catCount++
if _, ok := catNameMap[c.Name]; ok {
return fmt.Errorf("duplicate category name: %+v", c)
}
catNameMap[c.Name] = 0
if _, ok := catDescMap[c.Description]; ok {
return fmt.Errorf("duplicate category description: %+v", c)
}
catDescMap[c.Description] = 0
}
if catCount == 0 {
return errors.New("no categories found")
}
labelCount := 0
for _, l := range lf.Labels {
if err := checkLabel(l); err != nil {
return err
}
if _, ok := labelNameMap[l.Name]; ok {
return fmt.Errorf("duplicate label name: %+v", l)
}
labelNameMap[l.Name] = 0
if _, ok := labelDescMap[l.Description]; ok {
return fmt.Errorf("duplicate label description: %+v", l)
}
labelDescMap[l.Description] = 0
labelCount++
catName := l.CategoryName
var value int
var ok bool
if value, ok = catNameMap[catName]; !ok {
return fmt.Errorf("invalid category %v found for label %+v", catName, l)
}
// Record category name seen and count of occurrences
value++
catNameMap[catName] = value
}
if labelCount == 0 {
return errors.New("no labels found")
}
if debug {
fmt.Printf("DEBUG: category count: %v\n", catCount)
fmt.Printf("DEBUG: label count: %v\n", labelCount)
}
for name, count := range catNameMap {
if count == 0 {
return fmt.Errorf("category %v not used", name)
}
if debug {
fmt.Printf("DEBUG: category %v: label count: %d\n",
name, count)
}
}
return nil
}
func check(lf *LabelsFile) error {
if lf.Description == "" {
return errors.New("description cannot be blank")
}
if lf.Repo == "" {
return errors.New("repo cannot be blank")
}
if len(lf.Categories) == 0 {
return errors.New("no categories")
}
if len(lf.Labels) == 0 {
return errors.New("no labels")
}
return checkLabelsAndCategories(lf)
}

@ -0,0 +1,62 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import "strings"
func cleanString(s string) string {
result := strings.Replace(s, "\n", " ", -1)
result = strings.Replace(result, "\t", "\\t", -1)
result = strings.TrimSpace(result)
return result
}
func cleanLabel(l Label) Label {
return Label{
Name: cleanString(l.Name),
Description: cleanString(l.Description),
CategoryName: cleanString(l.CategoryName),
Colour: cleanString(l.Colour),
From: cleanString(l.From),
}
}
func cleanCategory(c *Category) {
c.Name = cleanString(c.Name)
c.Description = cleanString(c.Description)
c.URL = cleanString(c.URL)
}
func cleanCategories(lf *LabelsFile) {
var cleaned Categories
for _, c := range lf.Categories {
cleanCategory(&c)
cleaned = append(cleaned, c)
}
lf.Categories = cleaned
}
func cleanLabels(lf *LabelsFile) {
var cleaned Labels
for _, l := range lf.Labels {
new := cleanLabel(l)
cleaned = append(cleaned, new)
}
lf.Labels = cleaned
}
func clean(lf *LabelsFile) {
lf.Description = cleanString(lf.Description)
lf.Repo = cleanString(lf.Repo)
cleanCategories(lf)
cleanLabels(lf)
}

@ -0,0 +1,83 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"os"
"sort"
)
var outputFile = os.Stdout
// DisplayHandler is an interface that all output display handlers
// (formatters) must implement.
type DisplayHandler interface {
DisplayLabels(lf *LabelsFile) error
DisplayCategories(lf *LabelsFile, showLabels bool) error
}
// DisplayHandlers encapsulates the list of available display handlers.
type DisplayHandlers struct {
handlers map[string]DisplayHandler
}
// handlers is a map of the available output format display handling
// implementations.
var handlers map[string]DisplayHandler
// NewDisplayHandlers creates a new DisplayHandlers.
func NewDisplayHandlers() *DisplayHandlers {
if handlers == nil {
handlers = make(map[string]DisplayHandler)
handlers["md"] = NewDisplayMD(outputFile)
handlers[textFormat] = NewDisplayText(outputFile)
handlers["tsv"] = NewDisplayTSV(outputFile)
}
h := &DisplayHandlers{
handlers: handlers,
}
return h
}
// find looks for a display handler corresponding to the specified format
func (d *DisplayHandlers) find(format string) DisplayHandler {
for f, handler := range d.handlers {
if f == format {
return handler
}
}
return nil
}
// Get returns a list of the available formatters (display handler names).
func (d *DisplayHandlers) Get() []string {
var formats []string
for f := range d.handlers {
formats = append(formats, f)
}
sort.Strings(formats)
return formats
}
func show(inputFilename string, handler DisplayHandler, what DataToShow, withLabels bool) error {
lf, err := readYAML(inputFilename)
if err != nil {
return err
}
if what == showLabels {
return handler.DisplayLabels(lf)
}
return handler.DisplayCategories(lf, withLabels)
}

@ -0,0 +1,75 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"os"
"github.com/olekukonko/tablewriter"
)
type displayMD struct {
writer *tablewriter.Table
}
func NewDisplayMD(file *os.File) DisplayHandler {
md := &displayMD{}
md.writer = tablewriter.NewWriter(file)
md.writer.SetCenterSeparator("|")
md.writer.SetBorders(tablewriter.Border{
Left: true,
Right: true,
Top: false,
Bottom: false,
})
// Critical for GitHub Flavoured Markdown
md.writer.SetAutoWrapText(false)
return md
}
func (d *displayMD) render(headerFields []string, records [][]string) {
d.writer.SetHeader(headerFields)
d.writer.AppendBulk(records)
d.writer.Render()
}
func (d *displayMD) DisplayLabels(lf *LabelsFile) error {
var records [][]string
for _, l := range lf.Labels {
record := labelToRecord(l, true)
records = append(records, record)
}
headerFields := labelHeaderRecord()
d.render(headerFields, records)
return nil
}
func (d *displayMD) DisplayCategories(lf *LabelsFile, showLabels bool) error {
headerFields := categoryHeaderRecord(showLabels)
var records [][]string
for _, c := range lf.Categories {
record, err := categoryToRecord(lf, c, showLabels, true)
if err != nil {
return err
}
records = append(records, record)
}
d.render(headerFields, records)
return nil
}

View File

@ -0,0 +1,101 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"os"
)
type displayText struct {
file *os.File
}
func NewDisplayText(file *os.File) DisplayHandler {
return &displayText{
file: file,
}
}
func (d *displayText) DisplayLabels(lf *LabelsFile) error {
_, err := fmt.Fprintf(d.file, "Labels (count: %d):\n", len(lf.Labels))
if err != nil {
return err
}
for _, l := range lf.Labels {
err = d.displayLabel(l)
if err != nil {
return err
}
}
return nil
}
func (d *displayText) displayLabel(l Label) error {
_, err := fmt.Fprintf(d.file, " %s (%q) [category %q, colour %q, from %q]\n",
l.Name,
l.Description,
l.CategoryName,
l.Colour,
l.From)
return err
}
func (d *displayText) DisplayCategories(lf *LabelsFile, showLabels bool) error {
_, err := fmt.Fprintf(d.file, "Categories (count: %d):\n", len(lf.Categories))
if err != nil {
return err
}
for _, c := range lf.Categories {
err := d.displayCategory(c, lf, showLabels)
if err != nil {
return err
}
}
return nil
}
func (d *displayText) displayCategory(c Category, lf *LabelsFile, showLabels bool) error {
if showLabels {
labels, err := getLabelsByCategory(c.Name, lf)
if err != nil {
return err
}
_, err = fmt.Fprintf(d.file, " %s (%q, label count: %d, url: %v)\n",
c.Name,
c.Description,
len(labels),
c.URL)
if err != nil {
return err
}
for _, label := range labels {
_, err := fmt.Fprintf(d.file, " %s (%q)\n",
label.Name,
label.Description)
if err != nil {
return err
}
}
} else {
_, err := fmt.Fprintf(d.file, " %s (%q, url: %v)\n",
c.Name,
c.Description,
c.URL)
if err != nil {
return err
}
}
return nil
}

@ -0,0 +1,66 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"encoding/csv"
"os"
)
type displayTSV struct {
writer *csv.Writer
}
func NewDisplayTSV(file *os.File) DisplayHandler {
tsv := &displayTSV{}
tsv.writer = csv.NewWriter(file)
// Tab separator
tsv.writer.Comma = rune('\t')
return tsv
}
func (d *displayTSV) DisplayLabels(lf *LabelsFile) error {
record := labelHeaderRecord()
if err := d.writer.Write(record); err != nil {
return err
}
for _, l := range lf.Labels {
record := labelToRecord(l, false)
if err := d.writer.Write(record); err != nil {
return err
}
}
d.writer.Flush()
return d.writer.Error()
}
func (d *displayTSV) DisplayCategories(lf *LabelsFile, showLabels bool) error {
record := categoryHeaderRecord(showLabels)
if err := d.writer.Write(record); err != nil {
return err
}
for _, c := range lf.Categories {
record, err := categoryToRecord(lf, c, showLabels, false)
if err != nil {
return err
}
if err := d.writer.Write(record); err != nil {
return err
}
}
d.writer.Flush()
return d.writer.Error()
}

@ -0,0 +1,176 @@
#!/bin/bash
#
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Description: Generate the combined GitHub labels database for the
# specified repository.
set -e
script_name=${0##*/}
source "/etc/os-release" || "source /usr/lib/os-release"
self_dir=$(dirname "$(readlink -f "$0")")
cidir="${self_dir}/../.."
source "${cidir}/common.bash"
typeset -r labels_file="labels.yaml"
typeset -r labels_template="${labels_file}.in"
typeset -r master_labels_file="${self_dir}/${labels_file}"
typeset -r master_labels_template="${self_dir}/${labels_template}"
# The GitHub labels API requires a colour for each label so
# default to a white background.
typeset -r default_color="ffffff"
need_yq() {
# Install yq if it is not already installed.
${cidir}/install_yq.sh
command -v yq &>/dev/null || \
die 'yq command not found. Ensure "$GOPATH/bin" is in your $PATH.'
}
merge_yaml()
{
local -r file1="$1"
local -r file2="$2"
local -r out="$3"
[ -n "$file1" ] || die "need 1st file"
[ -n "$file2" ] || die "need 2nd file"
[ -n "$out" ] || die "need output file"
need_yq
yq merge "$file1" --append "$file2" > "$out"
}
check_yaml()
{
local -r file="$1"
[ -n "$file" ] || die "need file to check"
need_yq
yq read "$file" >/dev/null
[ -z "$(command -v yamllint)" ] && die "need yamllint installed"
# Deal with different versions of the tool
local opts=""
local has_strict_opt=$(yamllint --help 2>&1|grep -- --strict)
[ -n "$has_strict_opt" ] && opts+="--strict"
yamllint $opts "$file"
}
# Expand the variables in the labels database.
generate_yaml()
{
local repo="$1"
local template="$2"
local out="$3"
[ -n "$repo" ] || die "need repo"
[ -n "$template" ] || die "need template"
[ -n "$out" ] || die "need output file"
local repo_slug=$(echo "${repo}"|sed 's!github.com/!!g')
sed \
-e "s|REPO_SLUG|${repo_slug}|g" \
-e "s|DEFAULT_COLOUR|${default_color}|g" \
"$template" > "$out"
check_yaml "$out"
}
cmd_generate()
{
local repo="$1"
local out_file="$2"
[ -n "$repo" ] || die "need repo"
[ -n "$out_file" ] || die "need output file"
# Create the master database from the template
generate_yaml \
"${repo}" \
"${master_labels_template}" \
"${master_labels_file}"
local -r repo_labels_template="${GOPATH}/src/${repo}/${labels_template}"
local -r repo_labels_file="${GOPATH}/src/${repo}/${labels_file}"
# Check for a repo-specific set of labels
if [ -e "${repo_labels_template}" ]; then
info "Found repo-specific labels database"
# Generate repo-specific labels from template
generate_yaml \
"${repo}" \
"${repo_labels_template}" \
"${repo_labels_file}"
# Combine the two databases
tmp=$(mktemp)
merge_yaml \
"${master_labels_file}" \
"${repo_labels_file}" \
"${tmp}"
mv "${tmp}" "${out_file}"
else
info "No repo-specific labels database"
cp "${master_labels_file}" "${out_file}"
fi
info "Generated labels database ${out_file}"
# Perform checks
kata-github-labels check "${out_file}"
}
usage()
{
cat <<EOF
Usage: ${script_name} help
${script_name} generate <repo-name> <output-file>
Examples:
# Generate combined labels database for runtime repo and write to
# specified file
\$ ${script_name} generate github.com/kata-containers/kata-containers /tmp/out.yaml
EOF
}
main()
{
case "$1" in
generate)
shift
cmd_generate "$@"
;;
help|"")
usage
exit 0
;;
*)
die "Invalid command: '$1'"
;;
esac
}
main "$@"

@ -0,0 +1,555 @@
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
---
description: |
This file contains a list of all the generic GitHub labels used by all Kata
Containers GitHub repositories.
Each repository can optionally contain a top-level `labels.yaml` that
specifies a list of repository-specific labels (and possibly additional
categories). The labels in the repository-specific labels file plus the
labels defined in this file define the minimum list of labels for the
repository in question.
Each label must specify:
- Name (which must be lower-case without spaces)
- Description
- Category
- Colour (explicit colour, or `ffffff`)
A label may also specify a "From" value. This is used for renaming labels;
if a label has an associated "From" value, an existing label whose name is
specified by the "From" value will be renamed to the label name.
A category is a collective name used to describe one or more related labels.
Each category must specify:
- Name (which must be lower-case without spaces)
- Description
A category may also specify a related URL which points to a document
containing further information.
categories:
- name: api
description: Change related to an Application Programming Interface.
- name: architecture-committee
description: Needs input from the Architecture Committee.
url: https://github.com/kata-containers/community#architecture-committee
- name: area
description: Code component / general part of product affected.
- name: backport
description: |
Code that needs to be applied to other branches, generally older stable
ones.
- name: behaviour
description: |
How the issue affects the operation of the system. A more precise version
of regression.
- name: block
description: |
Stop a PR from being merged.
- name: cleanup
description: Refactoring, restructuring or general tidy-up needed.
- name: customer
description: Related to a customer.
- name: design
description: Requires formal review on the approach to solving the problem.
- name: detail
description: Need further information from the user or author.
- name: documentation
description: Needs more documentation.
- name: environment
description: Related to particular system environment.
- name: help
description: |
Request for technical help / extra resource. Also used for assisted
workflow.
- name: label-admin
description: Relates to the administration of labels.
- name: limitation
description: |
Issue cannot be resolved (too hard/impossible, would be too slow,
insufficient resources, etc).
url: |
https://github.com/kata-containers/kata-containers/blob/main/docs/Documentation-Requirements.md
- name: new-contributor
description: Small, self-contained tasks suitable for newcomers.
url: |
https://github.com/kata-containers/community/blob/main/CONTRIBUTING.md
- name: priority
description: |
Relative urgency (time-critical).
- name: question
description: Needs input from the team.
- name: rebase
description: Code conflicts need to be resolved.
- name: related
description: |
Related project. Base set can be generated from
https://github.com/kata-containers/kata-containers/blob/main/versions.yaml.
- name: release
description: Related to production of new versions.
- name: resolution
description: |
Issue is not (or no longer) valid for some reason. Label specifies
reason for closing.
- name: security
description: Potential or actual vulnerability / CVE.
url: https://github.com/kata-containers/community/blob/main/VMT/VMT.md
- name: severity
description: Relative importance (mission-critical).
- name: sizing
description: Estimate of the complexity of the task (story points).
- name: sub-type
description: More specific detail on the type category.
- name: team
description: Team that needs to analyse the issue.
- name: test
description: New tests needed.
- name: type
description: High-level summary of the issue.
- name: vendor
description: Related to handling imported code.
url: |
https://github.com/kata-containers/community/blob/main/CONTRIBUTING.md#re-vendor-prs
repo: kata-containers/kata-containers
labels:
- name: api-breakage
description: API was broken
category: api
color: ff0000
- name: api-change
description: API change
category: api
color: ffffff
- name: architecture-specific
description: Affects subset of architectures
category: environment
color: ffffff
- name: area/api
description: Application Programming Interface
category: area
color: ffffff
- name: area/cli
description: Command Line Interface (flags/options and arguments)
category: area
color: ffffff
- name: area/comms
description: Communications (gRPC, Yamux, etc)
category: area
color: ffffff
- name: area/config
description: Configuration
category: area
color: ffffff
- name: area/logging
description: Logging
category: area
color: ffffff
- name: area/networking
description: Networking
category: area
color: ffffff
- name: area/storage
description: Storage
category: area
color: ffffff
- name: area/tracing
description: Tracing
category: area
color: ffffff
- name: backport
description: Code needs to be applied to older (stable) releases
category: backport
color: ffffff
- name: bug
description: Incorrect behaviour
category: type
color: ff0000
- name: cannot-reproduce
description: Issue cannot be recreated
category: resolution
color: ffffff
- name: cleanup
description: General tidy-up
category: cleanup
color: ffffff
- name: crash
description: Causes part of the system to crash
category: behaviour
color: ffffff
- name: customer
description: Relates to a customer
category: customer
color: ffffff
- name: data-loss
description: System loses information
category: behaviour
color: ffffff
- name: deprecate
description: Highlight a feature that will soon be removed
category: cleanup
color: ffffff
- name: do-not-merge
description: PR has problems or depends on another
category: block
color: ff0000
- name: duplicate
description: Same issue as one already reported
category: resolution
color: ffffff
- name: enhancement
description: Improvement to an existing feature
category: type
color: ffffff
- name: feature
description: New functionality
category: type
color: ffffff
- name: good-first-issue
description: Small and simple task for new contributors
category: new-contributor
color: ffffff
- name: hang
description: System appears to stop operating or freeze
category: behaviour
color: ffffff
- name: high-priority
description: Very urgent issue (resolve quickly)
category: priority
color: ff7f00
- name: high-severity
description: Very important issue
category: severity
color: 00d7ff
- name: highest-priority
description: Critically urgent issue (must be resolved as soon as possible)
category: priority
color: ff0000
- name: highest-severity
description: Extremely important issue
category: severity
color: 00ffff
- name: invalid
description: Issue does not make sense
category: resolution
color: ffffff
- name: limitation
description: Issue cannot be resolved
category: limitation
color: ffffff
- name: medium-priority
description: Urgent issue (resolve before unprioritised issues)
category: priority
color: ffff00
- name: medium-severity
description: Important issue
category: severity
color: 0000ff
- name: needs-decision
description: Requires input from the Architecture Committee
category: architecture-committee
color: ffffff
- name: needs-design-doc
description: Needs a document explaining the design
category: design
color: ffffff
- name: needs-design-review
description: Needs a formal design review of the approach
category: design
color: ffffff
- name: needs-docs
description: Needs some new or updated documentation
category: documentation
color: ffffff
- name: needs-help
description: Request for extra help (technical, resource, etc)
category: help
color: ffffff
- name: needs-integration-tests
description: |
Needs new system/integration tests to validate behaviour in the tests
repository
category: test
color: ffffff
- name: needs-more-info
description: Blocked until user or author provides further details
category: detail
color: ffffff
- name: needs-new-label
description: New label required to categorise this issue
category: label-admin
color: ffffff
- name: needs-rebase
description: PR contains conflicts which need resolving
category: rebase
color: ffffff
- name: needs-revendor
description: Needs imported code to be re-vendored
category: vendor
color: ffffff
- name: needs-review
description: Needs to be assessed by the team.
category: team
color: 00ff00
- name: needs-unit-tests
description: Needs new unit tests to validate behaviour in this repository
category: test
color: ffffff
- name: os-specific
description: Affects subset of operating system / distro versions
category: environment
color: ffffff
- name: performance
description: System runs too slowly
category: behaviour
color: ffffff
- name: question
description: Requires an answer
category: question
color: ffffff
- name: refactor
description: Remove duplication, improve organisation, etc
category: cleanup
color: ffffff
- name: regression
description: Behaviour inadvertently reverted to older behaviour
category: sub-type
color: ffffff
- name: related/containerd
description: Containerd
category: related
color: ffffff
- name: related/cri
description: CRI
category: related
color: ffffff
- name: related/crio
description: CRIO
category: related
color: ffffff
- name: related/docker
description: Docker
category: related
color: ffffff
- name: related/firecracker
description: Firecracker
category: related
color: ffffff
- name: related/k8s
description: Kubernetes
category: related
color: ffffff
- name: related/qemu
description: QEMU
category: related
color: ffffff
- name: related/runc
description: Runc
category: related
color: ffffff
- name: release-gating
description: Release must wait for this to be resolved before release
category: release
color: ffffff
- name: resource-hog
description: System uses too many resources (such as memory)
category: behaviour
color: ffffff
- name: resource-leak
description: System does not free resources (such as memory)
category: behaviour
color: ffffff
- name: rfc
description: Requires input from the team
category: question
color: ffffff
- name: security
description: Potential or actual security issue
category: security
color: ff0000
- name: size/huge
description: |
Largest and most complex task (probably needs breaking into small
pieces)
category: sizing
color: ffffff
- name: size/large
description: Task of significant size
category: sizing
color: ffffff
- name: size/medium
description: Average sized task
category: sizing
color: ffffff
- name: size/small
description: Small and simple task
category: sizing
color: ffffff
- name: size/tiny
description: Smallest and simplest task
category: sizing
color: ffffff
- name: stale
description: Issue or PR was not updated in a timely fashion
category: resolution
color: ffffff
- name: team/ci
description: Need Continuous Integration Team input
category: team
color: ffffff
- name: team/developer
description: Need Developer Team input
category: team
color: ffffff
- name: team/documentation
description: Need Documentation Team input
category: team
color: ffffff
- name: team/kernel
description: Need Kernel Team input
category: team
color: ffffff
- name: team/metrics
description: Need Metrics Team input
category: team
color: ffffff
- name: team/packaging
description: Need Packaging Team input
category: team
color: ffffff
- name: team/test
description: Need Test Team input
category: team
color: ffffff
- name: unreliable
description: Part of the system is not stable
category: behaviour
color: ffffff
- name: wip
description: Work in Progress (PR incomplete - needs more work or rework)
category: block
color: ff0000
- name: wont-fix
description: Issue will not be fixed (not a good use of limited resources)
category: resolution
color: ffffff
- name: wrong-repo
description: Raised in incorrect repository
category: resolution
color: ffffff

@ -0,0 +1,555 @@
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
---
description: |
This file contains a list of all the generic GitHub labels used by all Kata
Containers GitHub repositories.
Each repository can optionally contain a top-level `labels.yaml` that
specifies a list of repository-specific labels (and possibly additional
categories). The labels in the repository-specific labels file plus the
labels defined in this file define the minimum list of labels for the
repository in question.
Each label must specify:
- Name (which must be lower-case without spaces)
- Description
- Category
- Colour (explicit colour, or `DEFAULT_COLOUR`)
A label may also specify a "From" value. This is used for renaming labels;
if a label has an associated "From" value, an existing label whose name is
specified by the "From" value will be renamed to the label name.
A category is a collective name used to describe one or more related labels.
Each category must specify:
- Name (which must be lower-case without spaces)
- Description
A category may also specify a related URL which points to a document
containing further information.
categories:
- name: api
description: Change related to an Application Programming Interface.
- name: architecture-committee
description: Needs input from the Architecture Committee.
url: https://github.com/kata-containers/community#architecture-committee
- name: area
description: Code component / general part of product affected.
- name: backport
description: |
Code that needs to be applied to other branches, generally older stable
ones.
- name: behaviour
description: |
How the issue affects the operation of the system. A more precise version
of regression.
- name: block
description: |
Stop a PR from being merged.
- name: cleanup
description: Refactoring, restructuring or general tidy-up needed.
- name: customer
description: Related to a customer.
- name: design
description: Requires formal review on the approach to solving the problem.
- name: detail
description: Need further information from the user or author.
- name: documentation
description: Needs more documentation.
- name: environment
description: Related to particular system environment.
- name: help
description: |
Request for technical help / extra resource. Also used for assisted
workflow.
- name: label-admin
description: Relates to the administration of labels.
- name: limitation
description: |
Issue cannot be resolved (too hard/impossible, would be too slow,
insufficient resources, etc).
url: |
https://github.com/kata-containers/kata-containers/blob/main/docs/Documentation-Requirements.md
- name: new-contributor
description: Small, self-contained tasks suitable for newcomers.
url: |
https://github.com/kata-containers/community/blob/main/CONTRIBUTING.md
- name: priority
description: |
Relative urgency (time-critical).
- name: question
description: Needs input from the team.
- name: rebase
description: Code conflicts need to be resolved.
- name: related
description: |
Related project. Base set can be generated from
https://github.com/kata-containers/kata-containers/blob/main/versions.yaml.
- name: release
description: Related to production of new versions.
- name: resolution
description: |
Issue is not (or no longer) valid for some reason. Label specifies
reason for closing.
- name: security
description: Potential or actual vulnerability / CVE.
url: https://github.com/kata-containers/community/blob/main/VMT/VMT.md
- name: severity
description: Relative importance (mission-critical).
- name: sizing
description: Estimate of the complexity of the task (story points).
- name: sub-type
description: More specific detail on the type category.
- name: team
description: Team that needs to analyse the issue.
- name: test
description: New tests needed.
- name: type
description: High-level summary of the issue.
- name: vendor
description: Related to handling imported code.
url: |
https://github.com/kata-containers/community/blob/main/CONTRIBUTING.md#re-vendor-prs
repo: REPO_SLUG
labels:
- name: api-breakage
description: API was broken
category: api
color: ff0000
- name: api-change
description: API change
category: api
color: DEFAULT_COLOUR
- name: architecture-specific
description: Affects subset of architectures
category: environment
color: DEFAULT_COLOUR
- name: area/api
description: Application Programming Interface
category: area
color: DEFAULT_COLOUR
- name: area/cli
description: Command Line Interface (flags/options and arguments)
category: area
color: DEFAULT_COLOUR
- name: area/comms
description: Communications (gRPC, Yamux, etc)
category: area
color: DEFAULT_COLOUR
- name: area/config
description: Configuration
category: area
color: DEFAULT_COLOUR
- name: area/logging
description: Logging
category: area
color: DEFAULT_COLOUR
- name: area/networking
description: Networking
category: area
color: DEFAULT_COLOUR
- name: area/storage
description: Storage
category: area
color: DEFAULT_COLOUR
- name: area/tracing
description: Tracing
category: area
color: DEFAULT_COLOUR
- name: backport
description: Code needs to be applied to older (stable) releases
category: backport
color: DEFAULT_COLOUR
- name: bug
description: Incorrect behaviour
category: type
color: ff0000
- name: cannot-reproduce
description: Issue cannot be recreated
category: resolution
color: DEFAULT_COLOUR
- name: cleanup
description: General tidy-up
category: cleanup
color: DEFAULT_COLOUR
- name: crash
description: Causes part of the system to crash
category: behaviour
color: DEFAULT_COLOUR
- name: customer
description: Relates to a customer
category: customer
color: DEFAULT_COLOUR
- name: data-loss
description: System loses information
category: behaviour
color: DEFAULT_COLOUR
- name: deprecate
description: Highlight a feature that will soon be removed
category: cleanup
color: DEFAULT_COLOUR
- name: do-not-merge
description: PR has problems or depends on another
category: block
color: ff0000
- name: duplicate
description: Same issue as one already reported
category: resolution
color: DEFAULT_COLOUR
- name: enhancement
description: Improvement to an existing feature
category: type
color: DEFAULT_COLOUR
- name: feature
description: New functionality
category: type
color: DEFAULT_COLOUR
- name: good-first-issue
description: Small and simple task for new contributors
category: new-contributor
color: DEFAULT_COLOUR
- name: hang
description: System appears to stop operating or freeze
category: behaviour
color: DEFAULT_COLOUR
- name: high-priority
description: Very urgent issue (resolve quickly)
category: priority
color: ff7f00
- name: high-severity
description: Very important issue
category: severity
color: 00d7ff
- name: highest-priority
description: Critically urgent issue (must be resolved as soon as possible)
category: priority
color: ff0000
- name: highest-severity
description: Extremely important issue
category: severity
color: 00ffff
- name: invalid
description: Issue does not make sense
category: resolution
color: DEFAULT_COLOUR
- name: limitation
description: Issue cannot be resolved
category: limitation
color: DEFAULT_COLOUR
- name: medium-priority
description: Urgent issue (resolve before unprioritised issues)
category: priority
color: ffff00
- name: medium-severity
description: Important issue
category: severity
color: 0000ff
- name: needs-decision
description: Requires input from the Architecture Committee
category: architecture-committee
color: DEFAULT_COLOUR
- name: needs-design-doc
description: Needs a document explaining the design
category: design
color: DEFAULT_COLOUR
- name: needs-design-review
description: Needs a formal design review of the approach
category: design
color: DEFAULT_COLOUR
- name: needs-docs
description: Needs some new or updated documentation
category: documentation
color: DEFAULT_COLOUR
- name: needs-help
description: Request for extra help (technical, resource, etc)
category: help
color: DEFAULT_COLOUR
- name: needs-integration-tests
description: |
Needs new system/integration tests to validate behaviour in the tests
repository
category: test
color: DEFAULT_COLOUR
- name: needs-more-info
description: Blocked until user or author provides further details
category: detail
color: DEFAULT_COLOUR
- name: needs-new-label
description: New label required to categorise this issue
category: label-admin
color: DEFAULT_COLOUR
- name: needs-rebase
description: PR contains conflicts which need resolving
category: rebase
color: DEFAULT_COLOUR
- name: needs-revendor
description: Needs imported code to be re-vendored
category: vendor
color: DEFAULT_COLOUR
- name: needs-review
description: Needs to be assessed by the team.
category: team
color: 00ff00
- name: needs-unit-tests
description: Needs new unit tests to validate behaviour in this repository
category: test
color: DEFAULT_COLOUR
- name: os-specific
description: Affects subset of operating system / distro versions
category: environment
color: DEFAULT_COLOUR
- name: performance
description: System runs too slowly
category: behaviour
color: DEFAULT_COLOUR
- name: question
description: Requires an answer
category: question
color: DEFAULT_COLOUR
- name: refactor
description: Remove duplication, improve organisation, etc
category: cleanup
color: DEFAULT_COLOUR
- name: regression
description: Behaviour inadvertently reverted to older behaviour
category: sub-type
color: DEFAULT_COLOUR
- name: related/containerd
description: Containerd
category: related
color: DEFAULT_COLOUR
- name: related/cri
description: CRI
category: related
color: DEFAULT_COLOUR
- name: related/crio
description: CRI-O
category: related
color: DEFAULT_COLOUR
- name: related/docker
description: Docker
category: related
color: DEFAULT_COLOUR
- name: related/firecracker
description: Firecracker
category: related
color: DEFAULT_COLOUR
- name: related/k8s
description: Kubernetes
category: related
color: DEFAULT_COLOUR
- name: related/qemu
description: QEMU
category: related
color: DEFAULT_COLOUR
- name: related/runc
description: Runc
category: related
color: DEFAULT_COLOUR
- name: release-gating
description: Release must wait for this to be resolved
category: release
color: DEFAULT_COLOUR
- name: resource-hog
description: System uses too many resources (such as memory)
category: behaviour
color: DEFAULT_COLOUR
- name: resource-leak
description: System does not free resources (such as memory)
category: behaviour
color: DEFAULT_COLOUR
- name: rfc
description: Requires input from the team
category: question
color: DEFAULT_COLOUR
- name: security
description: Potential or actual security issue
category: security
color: ff0000
- name: size/huge
description: |
Largest and most complex task (probably needs breaking into small
pieces)
category: sizing
color: DEFAULT_COLOUR
- name: size/large
description: Task of significant size
category: sizing
color: DEFAULT_COLOUR
- name: size/medium
description: Average sized task
category: sizing
color: DEFAULT_COLOUR
- name: size/small
description: Small and simple task
category: sizing
color: DEFAULT_COLOUR
- name: size/tiny
description: Smallest and simplest task
category: sizing
color: DEFAULT_COLOUR
- name: stale
description: Issue or PR was not updated in a timely fashion
category: resolution
color: DEFAULT_COLOUR
- name: team/ci
description: Need Continuous Integration Team input
category: team
color: DEFAULT_COLOUR
- name: team/developer
description: Need Developer Team input
category: team
color: DEFAULT_COLOUR
- name: team/documentation
description: Need Documentation Team input
category: team
color: DEFAULT_COLOUR
- name: team/kernel
description: Need Kernel Team input
category: team
color: DEFAULT_COLOUR
- name: team/metrics
description: Need Metrics Team input
category: team
color: DEFAULT_COLOUR
- name: team/packaging
description: Need Packaging Team input
category: team
color: DEFAULT_COLOUR
- name: team/test
description: Need Test Team input
category: team
color: DEFAULT_COLOUR
- name: unreliable
description: Part of the system is not stable
category: behaviour
color: DEFAULT_COLOUR
- name: wip
description: Work in Progress (PR incomplete - needs more work or rework)
category: block
color: ff0000
- name: wont-fix
description: Issue will not be fixed (not a good use of limited resources)
category: resolution
color: DEFAULT_COLOUR
- name: wrong-repo
description: Raised in incorrect repository
category: resolution
color: DEFAULT_COLOUR

@ -0,0 +1,157 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
// Description: Program to check and summarise the Kata GitHub
// labels YAML file.
package main
import (
"errors"
"fmt"
"os"
"github.com/urfave/cli"
)
type DataToShow int
const (
showLabels DataToShow = iota
showCategories DataToShow = iota
textFormat = "text"
defaultOutputFormat = textFormat
)
var errNeedYAMLFile = errors.New("need YAML file")
var (
// set by the build
name = ""
version = ""
commit = ""
debug = false
)
var formatFlag = cli.StringFlag{
Name: "format",
Usage: "display in specified format ('help' to show all)",
Value: defaultOutputFormat,
}
func commonHandler(context *cli.Context, what DataToShow, withLabels bool) error {
handlers := NewDisplayHandlers()
format := context.String("format")
if format == "help" {
availableFormats := handlers.Get()
for _, format := range availableFormats {
fmt.Fprintf(outputFile, "%s\n", format)
}
return nil
}
handler := handlers.find(format)
if handler == nil {
return fmt.Errorf("no handler for format %q", format)
}
if context.NArg() == 0 {
return errNeedYAMLFile
}
file := context.Args().Get(0)
return show(file, handler, what, withLabels)
}
func main() {
app := cli.NewApp()
app.Description = "tool to manipulate Kata GitHub labels"
app.Usage = app.Description
app.Version = fmt.Sprintf("%s %s (commit %v)", name, version, commit)
app.Flags = []cli.Flag{
cli.BoolFlag{
Name: "debug, d",
Usage: "enable debug output",
Destination: &debug,
},
}
app.Commands = []cli.Command{
{
Name: "check",
Usage: "Perform tests on the labels database",
Description: "Exit code denotes success",
Action: func(context *cli.Context) error {
if context.NArg() == 0 {
return errNeedYAMLFile
}
file := context.Args().Get(0)
return checkYAML(file)
},
},
{
Name: "show",
Usage: "Display labels database details",
Subcommands: []cli.Command{
{
Name: "categories",
Usage: "Display categories from labels database",
Flags: []cli.Flag{
formatFlag,
cli.BoolFlag{
Name: "with-labels",
Usage: "Add labels in each category to output",
},
},
Action: func(context *cli.Context) error {
withLabels := context.Bool("with-labels")
return commonHandler(context, showCategories, withLabels)
},
},
{
Name: "labels",
Usage: "Display labels from labels database",
Flags: []cli.Flag{
formatFlag,
},
Action: func(context *cli.Context) error {
withLabels := context.Bool("with-labels")
return commonHandler(context, showLabels, withLabels)
},
},
},
},
{
Name: "sort",
Usage: "Sort the specified YAML labels file and write to a new file",
Description: "Can be used to keep the master labels file sorted",
ArgsUsage: "<input-file> <output-file>",
Action: func(context *cli.Context) error {
if context.NArg() != 2 {
return errors.New("need two YAML files: <input-file> <output-file>")
}
from := context.Args().Get(0)
to := context.Args().Get(1)
return sortYAML(from, to)
},
},
}
err := app.Run(os.Args)
if err != nil {
fmt.Fprintf(os.Stderr, "ERROR: %v\n", err)
os.Exit(1)
}
}
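As a quick orientation to the CLI defined above, here is an illustrative usage sketch. It assumes the binary is built as `kata-github-labels` and that `labels.yaml` is a labels database file; both names are placeholders, not part of this commit.

```sh
# Validate the labels database (the exit code denotes success).
$ kata-github-labels check labels.yaml

# Show categories, including the labels in each, in the default text format.
$ kata-github-labels show categories --with-labels labels.yaml

# List the available output formats for the "show" commands.
$ kata-github-labels show labels --format help labels.yaml

# Sort a labels file and write the result to a new file.
$ kata-github-labels sort labels.yaml labels-sorted.yaml
```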

@ -0,0 +1,102 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"strings"
)
const (
labelNamesSeparator = ","
)
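// labelToRecord converts a Label into a flat record of string fields
// (name, description, category, colour, from), optionally quoting each
// value as inline code.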
func labelToRecord(l Label, quote bool) (record []string) {
name := l.Name
category := l.CategoryName
colour := l.Colour
from := l.From
if quote {
name = fmt.Sprintf("`%s`", l.Name)
category = fmt.Sprintf("`%s`", l.CategoryName)
colour = fmt.Sprintf("`%s`", l.Colour)
if from != "" {
from = fmt.Sprintf("`%s`", l.From)
}
}
record = append(record, name)
record = append(record, l.Description)
record = append(record, category)
record = append(record, colour)
record = append(record, from)
return record
}
func labelHeaderRecord() []string {
return []string{
"Name",
"Description",
"Category",
"Colour",
"From",
}
}
func categoryHeaderRecord(showLabels bool) []string {
var fields []string
fields = append(fields, "Name")
fields = append(fields, "Description")
fields = append(fields, "URL")
if showLabels {
fields = append(fields, "Labels")
}
return fields
}
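// categoryToRecord converts a Category into a record of string fields (name,
// description, URL) and, if requested, a comma-separated list of the labels
// in that category.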
func categoryToRecord(lf *LabelsFile, c Category, showLabels, quote bool) ([]string, error) {
var record []string
name := c.Name
if quote {
name = fmt.Sprintf("`%s`", c.Name)
}
record = append(record, name)
record = append(record, c.Description)
record = append(record, c.URL)
if showLabels {
var labelNames []string
labels, err := getLabelsByCategory(c.Name, lf)
if err != nil {
return nil, err
}
for _, l := range labels {
labelName := l.Name
if quote {
labelName = fmt.Sprintf("`%s`", l.Name)
}
labelNames = append(labelNames, labelName)
}
result := strings.Join(labelNames, labelNamesSeparator)
record = append(record, result)
}
return record, nil
}

@ -0,0 +1,55 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
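// Category is a collective name used to group one or more related labels.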
type Category struct {
Name string
Description string
URL string `yaml:",omitempty"`
}
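// Label represents a single GitHub label: its category, its colour, and
// optionally the old label name ("from") it should be renamed from.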
type Label struct {
Name string
Description string
CategoryName string `yaml:"category"`
Colour string `yaml:"color"`
From string `yaml:",omitempty"`
}
type Categories []Category
func (c Categories) Len() int {
return len(c)
}
func (c Categories) Swap(i, j int) {
c[i], c[j] = c[j], c[i]
}
func (c Categories) Less(i, j int) bool {
return c[i].Name < c[j].Name
}
type Labels []Label
func (l Labels) Len() int {
return len(l)
}
func (l Labels) Swap(i, j int) {
l[i], l[j] = l[j], l[i]
}
func (l Labels) Less(i, j int) bool {
return l[i].Name < l[j].Name
}
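// LabelsFile represents the top-level structure of the labels YAML database.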
type LabelsFile struct {
Description string
Categories Categories
Repo string
Labels Labels
}

@ -0,0 +1,24 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import "errors"
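// getLabelsByCategory returns all labels in the database that belong to the
// named category.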
func getLabelsByCategory(categoryName string, lf *LabelsFile) ([]Label, error) {
var labels []Label
if categoryName == "" {
return nil, errors.New("need category name")
}
for _, label := range lf.Labels {
if label.CategoryName == categoryName {
labels = append(labels, label)
}
}
return labels, nil
}

@ -0,0 +1,72 @@
// Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"fmt"
"os"
"sort"
yaml "gopkg.in/yaml.v2"
)
const fileMode os.FileMode = 0600
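// readYAML loads the labels database from file, sorts the labels and
// categories, then cleans and validates the result.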
func readYAML(file string) (*LabelsFile, error) {
bytes, err := os.ReadFile(file)
if err != nil {
return nil, err
}
lf := LabelsFile{}
err = yaml.Unmarshal(bytes, &lf)
if err != nil {
return nil, err
}
sort.Sort(lf.Labels)
sort.Sort(lf.Categories)
clean(&lf)
err = check(&lf)
if err != nil {
return nil, fmt.Errorf("file was not in expected format: %v", err)
}
return &lf, nil
}
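// writeYAML marshals the labels database back to YAML and writes it to file.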
func writeYAML(lf *LabelsFile, file string) error {
bytes, err := yaml.Marshal(lf)
if err != nil {
return err
}
return os.WriteFile(file, bytes, fileMode)
}
func checkYAML(file string) error {
// read and check
_, err := readYAML(file)
if err == nil {
fmt.Printf("Checked file %v\n", file)
}
return err
}
func sortYAML(fromFile, toFile string) error {
// read and sort
lf, err := readYAML(fromFile)
if err != nil {
return err
}
return writeYAML(lf, toFile)
}

@ -615,3 +615,64 @@ function arch_to_kernel() {
*) die "unsupported architecture: ${arch}";;
esac
}
# Obtain a list of the files the PR changed.
# Returns the information in format "${filter}\t${file}".
get_pr_changed_file_details_full()
{
# List of filters used to restrict the types of file changes.
# See git-diff-tree(1) for further info.
local filters=""
# Added file
filters+="A"
# Copied file
filters+="C"
# Modified file
filters+="M"
# Renamed file
filters+="R"
git diff-tree \
-r \
--name-status \
--diff-filter="${filters}" \
"origin/${branch}" HEAD
}
# Obtain a list of the files the PR changed, ignoring vendor files.
# Returns the information in format "${filter}\t${file}".
get_pr_changed_file_details()
{
get_pr_changed_file_details_full | grep -v "vendor/"
}
function get_dep_from_yaml_db(){
local versions_file="$1"
local dependency="$2"
[ ! -f "$versions_file" ] && die "cannot find $versions_file"
"${repo_root_dir}/ci/install_yq.sh" >&2
result=$("${GOPATH}/bin/yq" r -X "$versions_file" "$dependency")
[ "$result" = "null" ] && result=""
echo "$result"
}
function get_test_version(){
local dependency="$1"
local db
# directory of this script, not the caller
local cidir
cidir=$(dirname "${BASH_SOURCE[0]}")
db="${cidir}/../versions.yaml"
get_dep_from_yaml_db "${db}" "${dependency}"
}
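The helpers above are intended to be sourced by other test scripts. A minimal usage sketch follows; the library path is hypothetical, and it assumes the CI has already set the variables these functions rely on (`branch`, `repo_root_dir`, `GOPATH`). The `externals.hadolint.version` key matches the `versions.yaml` fragment further down in this diff.

```sh
# Illustrative only: source the common helpers (path is a placeholder).
source "./common.bash"

# List the files changed by the PR, excluding vendored code
# (one "<filter>\t<file>" entry per line).
get_pr_changed_file_details

# Look up a pinned tool version from versions.yaml.
get_test_version "externals.hadolint.version"
```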

25
tests/go.mod Normal file
@ -0,0 +1,25 @@
module github.com/kata-containers/tests
go 1.19
require (
github.com/olekukonko/tablewriter v0.0.6-0.20210304033056-74c60be0ef68
github.com/sirupsen/logrus v1.8.1
github.com/stretchr/testify v1.7.1
github.com/urfave/cli v1.22.0
gopkg.in/russross/blackfriday.v2 v2.0.0-00010101000000-000000000000
gopkg.in/yaml.v2 v2.4.0
)
require (
github.com/cpuguy83/go-md2man v1.0.10 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/mattn/go-runewidth v0.0.13 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/rivo/uniseg v0.2.0 // indirect
github.com/russross/blackfriday v1.6.0 // indirect
golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32 // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
)
replace gopkg.in/russross/blackfriday.v2 => github.com/russross/blackfriday/v2 v2.1.0

40
tests/go.sum Normal file
@ -0,0 +1,40 @@
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU=
github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/olekukonko/tablewriter v0.0.6-0.20210304033056-74c60be0ef68 h1:sB6FDvBA1aVDINTWnVSrcJ95fV/QkN6fTJgksZOT8vY=
github.com/olekukonko/tablewriter v0.0.6-0.20210304033056-74c60be0ef68/go.mod h1:8Hf+pH6thup1sPZPD+NLg7d6vbpsdilu9CPIeikvgMQ=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/urfave/cli v1.22.0 h1:8nz/RUUotroXnOpYzT/Fy3sBp+2XEbXaY641/s3nbFI=
github.com/urfave/cli v1.22.0/go.mod h1:b3D7uWrF2GilkNgYpgcg6J+JMUw7ehmNkE8sZdliGLc=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32 h1:Js08h5hqB5xyWR789+QqueR6sDE8mk+YvpETZ+F6X9Y=
golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

229
tests/kata-doc-to-script.sh Executable file
@ -0,0 +1,229 @@
#!/bin/bash
license="
#
# Copyright (c) 2018 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
"
set -e
[ -n "$DEBUG" ] && set -x
script_name="${0##*/}"
typeset -r warning="WARNING: Do *NOT* run the generated script without reviewing it carefully first!"
# GitHub markdown markers used to surround a code block. All text within the
# markers is rendered in a fixed-width font.
typeset -r bash_block_open="\`\`\`bash"
typeset -r block_open="\`\`\`"
typeset -r block_close="\`\`\`"
# GitHub issue templates have a special metadata section at the top delimited
# by this string. See:
#
# https://raw.githubusercontent.com/kata-containers/.github/master/.github/ISSUE_TEMPLATE/bug_report.md
typeset -r metadata_block='---'
# Used to delimit inline code blocks
typeset -r backtick="\`"
# convention used in all documentation to represent a non-privileged user's
# shell prompt. All lines starting with this value inside a code block are
# commands the user should run.
typeset -r code_prompt="\$ "
# files are expected to match this regular expression
typeset -r extension_regex="\.md$"
strict="no"
require_commands="no"
check_only="no"
invert="no"
verbose="no"
usage()
{
cat <<EOF
Usage: ${script_name} [options] <markdown-file> [<script-file> [<description>]]
This script will convert a GitHub-flavoured markdown document file into a
bash(1) script by extracting the bash code blocks.
Options:
-c : check the file but don't create the script (sets exit code).
-h : show this usage.
-i : invert output (remove code blocks and inline code, displaying the
remaining parts of the document). Incompatible with '-c'.
-r : require at least one command block to be found.
-s : strict mode - perform extra checks.
-v : verbose mode.
Example usage:
$ ${script_name} foo.md foo.md.sh
Notes:
- If a description is specified, it will be added to the script as a
comment.
- <script-file> may be specified as '-' meaning send output to stdout.
Limitations:
- The script is unable to handle embedded code blocks like this:
\`\`\`
\`\`\`bash
\$ echo code in an embedded set of backticks
\`\`\`
\`\`\`
To overcome this issue, ensure that the outer set of backticks is replaced
with an HTML PRE tag:
<pre>
\`\`\`bash
\$ echo code in an embedded set of backticks
\`\`\`
</pre>
This will both render correctly on GitHub and allow this script to remove
the code block.
Note: this solves one problem but introduces another - this script will not
remove the HTML tags.
${warning}
EOF
exit 0
}
die()
{
local msg="$*"
echo "ERROR: $msg" >&2
exit 1
}
script_header()
{
local -r description="$1"
cat <<-EOF
#!/bin/bash
${license}
#----------------------------------------------
# WARNING: Script auto-generated from '$file'.
#
# ${warning}
#----------------------------------------------
#----------------------------------------------
# Description: $description
#----------------------------------------------
# fail the entire script if any simple command fails
set -e
EOF
}
# Convert the specified github-flavoured markdown format file
# into a bash script by extracting the bash blocks.
doc_to_script()
{
file="$1"
outfile="$2"
description="$3"
invert="$4"
[ -n "$file" ] || die "need file"
[ "${check_only}" = "no" ] && [ -z "$outfile" ] && die "need output file"
[ "$outfile" = '-' ] && outfile="/dev/stdout"
if [ "$invert" = "yes" ]
then
# First, remove code blocks.
# Next, remove inline code in backticks.
# Finally, remove a metadata block as used in GitHub issue
# templates.
cat "$file" |\
sed -e "/^[ \>]*${block_open}/,/^[ \>]*${block_close}/d" \
-e "s/${backtick}[^${backtick}]*${backtick}//g" \
-e "/^${metadata_block}$/,/^${metadata_block}$/d" \
> "$outfile"
return
fi
all=$(mktemp)
body=$(mktemp)
cat "$file" |\
sed -n "/^ *${bash_block_open}/,/^ *${block_close}/ p" |\
sed -e "/^ *${block_close}/ d" \
-e "s/^ *${code_prompt}//g" \
-e 's/^ *//g' > "$body"
[ "$require_commands" = "yes" ] && [ ! -s "$body" ] && die "no commands found in file '$file'"
script_header "$description" > "$all"
cat "$body" >> "$all"
# sanity check
[ "$check_only" = "yes" ] && redirect="1>/dev/null 2>/dev/null"
{ local ret; eval bash -n "$all" $redirect; ret=$?; } || true
[ "$ret" -ne 0 ] && die "shell code in file '$file' is not valid"
# create output file
[ "$check_only" = "no" ] && cp "$all" "$outfile"
# clean up
rm -f "$body" "$all"
}
main()
{
while getopts "chirsv" opt
do
case $opt in
c) check_only="yes" ;;
h) usage ;;
i) invert="yes" ;;
r) require_commands="yes" ;;
s) strict="yes" ;;
v) verbose="yes" ;;
esac
done
shift $(($OPTIND - 1))
file="$1"
outfile="$2"
description="$3"
[ -n "$file" ] || die "need file"
[ "$verbose" = "yes" ] && echo "INFO: processing file '$file'"
if [ "$strict" = "yes" ]
then
echo "$file"|grep -q "$extension_regex" ||\
die "file '$file' doesn't match pattern '$extension_regex'"
fi
doc_to_script "$file" "$outfile" "$description" "$invert"
}
main "$@"
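For reference, a few illustrative invocations based on the options handled above; the file names are placeholders.

```sh
# Check that the bash code blocks in a document are valid shell syntax;
# strict mode (-s) also enforces the .md extension. The exit code reports the result.
$ ./kata-doc-to-script.sh -cs foo.md

# Generate a runnable script from a document, adding a description comment.
$ ./kata-doc-to-script.sh foo.md foo.md.sh "Example install guide"

# Invert: strip code blocks and inline code, leaving only the prose.
$ ./kata-doc-to-script.sh -i foo.md foo.md.txt
```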

1397
tests/static-checks.sh Executable file

File diff suppressed because it is too large

@ -249,6 +249,11 @@ externals:
url: "http://ftp.gnu.org/pub/gnu/gperf/"
version: "3.1"
hadolint:
description: "the dockerfile linter used by static-checks"
url: "https://github.com/hadolint/hadolint"
version: "2.12.0"
lvm2:
description: "LVM2 and device-mapper tools and libraries"
url: "https://github.com/lvmteam/lvm2"
@ -343,6 +348,12 @@ externals:
# yamllint disable-line rule:line-length
binary: "https://gitlab.com/virtio-fs/virtiofsd/uploads/9ec473efd0203219d016e66aac4190aa/virtiofsd-v1.8.0.zip"
xurls:
description: |
Tool used by the CI to check URLs in documents and code comments.
url: "mvdan.cc/xurls/v2/cmd/xurls"
version: "v2.5.0"
languages:
description: |
Details of programming languages required to build system
@ -371,6 +382,7 @@ languages:
golangci-lint:
description: "golangci-lint"
notes: "'version' is the default minimum version used by this project."
url: "github.com/golangci/golangci-lint"
version: "1.50.1"
meta:
description: |