Vendor gazelle

This commit is contained in:
Tim Hockin 2017-12-22 16:49:04 -08:00
parent 4685df26dd
commit 3e583de0ac
106 changed files with 18281 additions and 1975 deletions

75
Godeps/Godeps.json generated
View File

@ -7,6 +7,7 @@
"github.com/jteeuwen/go-bindata/go-bindata",
"github.com/tools/godep",
"github.com/client9/misspell/cmd/misspell",
"github.com/bazelbuild/bazel-gazelle/cmd/gazelle",
"./..."
],
"Deps": [
@ -330,6 +331,71 @@
"Comment": "v1.12.7",
"Rev": "760741802ad40f49ae9fc4a69ef6706d2527d62e"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/cmd/gazelle",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/config",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/label",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/merger",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/packages",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/pathtools",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/repos",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/resolve",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/rules",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/version",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/wspace",
"Comment": "0.10.1",
"Rev": "ff4d67f395b3f4d1dc9810eeceb61585fea252cb"
},
{
"ImportPath": "github.com/bazelbuild/buildtools/build",
"Comment": "0.6.0-60-g1a9c38e",
"Rev": "1a9c38e0df9397d033a1ca535596de5a7c1cf18f"
},
{
"ImportPath": "github.com/bazelbuild/buildtools/tables",
"Comment": "0.6.0-60-g1a9c38e",
"Rev": "1a9c38e0df9397d033a1ca535596de5a7c1cf18f"
},
{
"ImportPath": "github.com/beorn7/perks/quantile",
"Rev": "3ac7bf7a47d159a033b107610db8a1b6575507a4"
@ -2455,15 +2521,10 @@
"ImportPath": "github.com/pborman/uuid",
"Rev": "ca53cad383cad2479bbba7f7a1a05797ec1386e4"
},
{
"ImportPath": "github.com/pelletier/go-buffruneio",
"Comment": "v0.1.0",
"Rev": "df1e16fde7fc330a0ca68167c23bf7ed6ac31d6d"
},
{
"ImportPath": "github.com/pelletier/go-toml",
"Comment": "v0.3.5-10-g0049ab3",
"Rev": "0049ab3dc4c4c70a9eee23087437b69c0dde2130"
"Comment": "v1.0.1",
"Rev": "16398bac157da96aa88f98a2df640c7f32af1da2"
},
{
"ImportPath": "github.com/peterbourgon/diskv",

2427
Godeps/LICENSES generated

File diff suppressed because it is too large Load Diff

View File

@ -1,18 +0,0 @@
package(default_visibility = ["//visibility:public"])
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [
":package-srcs",
"//api/openapi-spec:all-srcs",
"//api/swagger-spec:all-srcs",
],
tags = ["automanaged"],
)

View File

@ -58,7 +58,8 @@ filegroup(
name = "all-srcs",
srcs = [
":package-srcs",
"//api:all-srcs",
"//api/openapi-spec:all-srcs",
"//api/swagger-spec:all-srcs",
"//build:all-srcs",
"//cluster:all-srcs",
"//cmd:all-srcs",

View File

@ -59,6 +59,7 @@ REQUIRED_BINS=(
"github.com/jteeuwen/go-bindata/go-bindata"
"github.com/tools/godep"
"github.com/client9/misspell/cmd/misspell"
"github.com/bazelbuild/bazel-gazelle/cmd/gazelle"
"./..."
)

View File

@ -26,13 +26,17 @@ kube::util::ensure-gnu-sed
# TODO(spxtr): Remove this line once Bazel is the only way to build.
rm -f "${KUBE_ROOT}/pkg/generated/openapi/zz_generated.openapi.go"
# Ensure that we find the binaries we build before anything else.
export GOBIN="${KUBE_OUTPUT_BINPATH}"
PATH="${GOBIN}:${PATH}"
# Install tools we need, but only from vendor/...
go install ./vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle
# The git commit sha1s here should match the values in $KUBE_ROOT/WORKSPACE.
kube::util::go_install_from_commit \
github.com/kubernetes/repo-infra/kazel \
97099dccc8807e9159dc28f374a8f0602cab07e1
kube::util::go_install_from_commit \
github.com/bazelbuild/bazel-gazelle/cmd/gazelle \
0.10.1
touch "${KUBE_ROOT}/vendor/BUILD"

View File

@ -40,9 +40,9 @@ mkdir -p "${_tmp_kuberoot}/.."
cp -a "${KUBE_ROOT}" "${_tmp_kuberoot}/.."
cd "${_tmp_kuberoot}"
GOPATH="${_tmp_gopath}" ./hack/update-bazel.sh
GOPATH="${_tmp_gopath}" PATH="${_tmp_gopath}/bin:${PATH}" ./hack/update-bazel.sh
diff=$(diff -Naupr "${KUBE_ROOT}" "${_tmp_kuberoot}" || true)
diff=$(diff -Naupr -x '_output' "${KUBE_ROOT}" "${_tmp_kuberoot}" || true)
if [[ -n "${diff}" ]]; then
echo "${diff}" >&2

View File

@ -64,6 +64,7 @@ _kubetmp="${_kubetmp}/kubernetes"
# Do all our work in the new GOPATH
export GOPATH="${_tmpdir}"
export PATH="${GOPATH}/bin:${PATH}"
pushd "${_kubetmp}" > /dev/null 2>&1
# Restore the Godeps into our temp directory

View File

@ -20,15 +20,12 @@ set -o pipefail
export KUBE_ROOT=$(dirname "${BASH_SOURCE}")/..
source "${KUBE_ROOT}/hack/lib/init.sh"
# Ensure that we find the binaries we build before anything else.
export GOBIN="${KUBE_OUTPUT_BINPATH}"
PATH="${GOBIN}:${PATH}"
# Install tools we need, but only from vendor/...
cd ${KUBE_ROOT}
go install ./vendor/github.com/client9/misspell/cmd/misspell
if ! which misspell >/dev/null 2>&1; then
echo "Can't find misspell - is your GOPATH 'bin' in your PATH?" >&2
echo " GOPATH: ${GOPATH}" >&2
echo " PATH: ${PATH}" >&2
exit 1
fi
# Spell checking
# All the skipping files are defined in hack/.spelling_failures

14
vendor/BUILD vendored
View File

@ -44,6 +44,19 @@ filegroup(
"//vendor/github.com/aws/aws-sdk-go/service/elbv2:all-srcs",
"//vendor/github.com/aws/aws-sdk-go/service/kms:all-srcs",
"//vendor/github.com/aws/aws-sdk-go/service/sts:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/merger:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/packages:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/repos:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/rules:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/version:all-srcs",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace:all-srcs",
"//vendor/github.com/bazelbuild/buildtools/build:all-srcs",
"//vendor/github.com/bazelbuild/buildtools/tables:all-srcs",
"//vendor/github.com/beorn7/perks/quantile:all-srcs",
"//vendor/github.com/blang/semver:all-srcs",
"//vendor/github.com/chai2010/gettext-go/gettext:all-srcs",
@ -304,7 +317,6 @@ filegroup(
"//vendor/github.com/opencontainers/runtime-spec/specs-go:all-srcs",
"//vendor/github.com/opencontainers/selinux/go-selinux:all-srcs",
"//vendor/github.com/pborman/uuid:all-srcs",
"//vendor/github.com/pelletier/go-buffruneio:all-srcs",
"//vendor/github.com/pelletier/go-toml:all-srcs",
"//vendor/github.com/peterbourgon/diskv:all-srcs",
"//vendor/github.com/pkg/errors:all-srcs",

18
vendor/github.com/bazelbuild/bazel-gazelle/AUTHORS generated vendored Normal file
View File

@ -0,0 +1,18 @@
# This is the official list of authors for copyright purposes.
# Names should be added to this file as:
# Name or Organization <email address>
# The email address is not required for organizations.
Andy Hochhaus <hochhaus@users.noreply.github.com>
Antoine Pelisse <apelisse@gmail.com>
GinFungYJF <645116215@qq.com>
Google Inc.
Improbable Worlds Ltd
Jeff Hodges <jeff@somethingsimilar.com>
John Millikin <jmillikin@gmail.com>
Melinda Lu <melinda@vsco.co>
Peter McAlpine <peter@aoeu.ca>
RS <sayrer@gmail.com>
Rodrigo Queiro <overdrigzed@gmail.com>
Tom Payne <twpayne@gmail.com>
Yuki Yugui Sonoda <yugui@yugui.jp>

View File

@ -0,0 +1,29 @@
# People who have agreed to one of the CLAs and can contribute patches.
# The AUTHORS file lists the copyright holders; this file
# lists people. For example, Google employees are listed here
# but not in AUTHORS, because Google holds the copyright.
#
# https://developers.google.com/open-source/cla/individual
# https://developers.google.com/open-source/cla/corporate
#
# Names should be added to this file as:
# Name <email address>
Ainsley Escorce-Jones <ains@users.noreply.github.com>
Andy Hochhaus <hochhaus@users.noreply.github.com>
Antoine Pelisse <apelisse@gmail.com>
GinFungYJF <645116215@qq.com>
Ian Cottrell <ian.the.hat@gmail.com>
Jay Conrod <jayconrod@gmail.com>
Jeff Grafton <ixdy@users.noreply.github.com>
Jeff Hodges <jeff@somethingsimilar.com>
John Millikin <jmillikin@gmail.com>
Kristina <k.chodorow@gmail.com>
Melinda Lu <melinda@vsco.co>
Paul Bethe <pbethe@google.com>
Peter McAlpine <peter@aoeu.ca>
Rodrigo Queiro <overdrigzed@gmail.com>
RS <sayrer@gmail.com>
Stefan Sakalik <stefan@improbable.io>
Tom Payne <twpayne@gmail.com>
Yuki Yugui Sonoda <yugui@yugui.jp>

202
vendor/github.com/bazelbuild/bazel-gazelle/LICENSE generated vendored Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,49 @@
load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library")
go_library(
name = "go_default_library",
srcs = [
"diff.go",
"fix.go",
"fix-update.go",
"flags.go",
"gazelle.go",
"print.go",
"update-repos.go",
"version.go",
],
importpath = "github.com/bazelbuild/bazel-gazelle/cmd/gazelle",
visibility = ["//visibility:private"],
deps = [
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/merger:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/packages:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/repos:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/rules:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/version:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace:go_default_library",
"//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
],
)
go_binary(
name = "gazelle",
embed = [":go_default_library"],
visibility = ["//visibility:public"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,55 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"bytes"
"io/ioutil"
"os"
"os/exec"
"github.com/bazelbuild/bazel-gazelle/internal/config"
bf "github.com/bazelbuild/buildtools/build"
)
// diffFile prints a unified diff between the build file on disk and the
// freshly formatted contents of file, writing the diff to stdout. The new
// contents are staged in a temporary file so the external "diff" tool can
// compare them. A non-zero exit from diff (files differ) is not an error.
func diffFile(c *config.Config, file *bf.File, path string) error {
	before, readErr := ioutil.ReadFile(file.Path)
	if readErr != nil {
		// Missing or unreadable original: treat as empty so --new-file works.
		before = nil
	}
	after := bf.Format(file)
	if bytes.Equal(before, after) {
		// Nothing changed; no diff to show.
		return nil
	}
	tmp, err := ioutil.TempFile("", c.DefaultBuildFileName())
	if err != nil {
		return err
	}
	tmp.Close()
	defer os.Remove(tmp.Name())
	if err := ioutil.WriteFile(tmp.Name(), after, 0666); err != nil {
		return err
	}
	diffCmd := exec.Command("diff", "-u", "--new-file", path, tmp.Name())
	diffCmd.Stdout = os.Stdout
	diffCmd.Stderr = os.Stderr
	runErr := diffCmd.Run()
	if _, differed := runErr.(*exec.ExitError); differed {
		// diff returns non-zero when files are different. This is not an error.
		return nil
	}
	return runErr
}

View File

@ -0,0 +1,424 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"errors"
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"strings"
"github.com/bazelbuild/bazel-gazelle/internal/config"
"github.com/bazelbuild/bazel-gazelle/internal/label"
"github.com/bazelbuild/bazel-gazelle/internal/merger"
"github.com/bazelbuild/bazel-gazelle/internal/packages"
"github.com/bazelbuild/bazel-gazelle/internal/repos"
"github.com/bazelbuild/bazel-gazelle/internal/resolve"
"github.com/bazelbuild/bazel-gazelle/internal/rules"
"github.com/bazelbuild/bazel-gazelle/internal/wspace"
bf "github.com/bazelbuild/buildtools/build"
)
// updateConfig holds configuration information needed to run the fix and
// update commands. This includes everything in config.Config, but it also
// includes some additional fields that aren't relevant to other packages.
type updateConfig struct {
c *config.Config
emit emitFunc
outDir, outSuffix string
repos []repos.Repo
}
// emitFunc writes or displays a merged build file; implementations are
// selected by the -mode flag (print, fix, diff).
type emitFunc func(*config.Config, *bf.File, string) error
// modeFromName maps -mode flag values to their output implementations.
var modeFromName = map[string]emitFunc{
"print": printFile,
"fix": fixFile,
"diff": diffFile,
}
// visitRecord stores information about a directory visited with
// packages.Walk.
type visitRecord struct {
// pkgRel is the slash-separated path to the visited directory, relative to
// the repository root. "" for the repository root itself.
pkgRel string
// rules is a list of generated Go rules.
rules []bf.Expr
// empty is a list of empty Go rules that may be deleted.
empty []bf.Expr
// file is the build file being processed.
file *bf.File
}
// byPkgRel sorts visitRecords by their repository-relative package path.
type byPkgRel []visitRecord
func (vs byPkgRel) Len() int { return len(vs) }
func (vs byPkgRel) Less(i, j int) bool { return vs[i].pkgRel < vs[j].pkgRel }
func (vs byPkgRel) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] }
// runFixUpdate implements both the "fix" and "update" commands. It walks the
// repository, generates or merges rules for each package, resolves
// dependencies across the whole tree, and finally emits the merged build
// files using the configured output mode (print, fix, or diff).
func runFixUpdate(cmd command, args []string) error {
uc, err := newFixUpdateConfiguration(cmd, args)
if err != nil {
return err
}
if cmd == fixCmd {
// Only check the version when "fix" is run. Generated build files
// frequently work with older version of rules_go, and we don't want to
// nag too much since there's no way to disable this warning.
checkRulesGoVersion(uc.c.RepoRoot)
}
l := label.NewLabeler(uc.c)
ruleIndex := resolve.NewRuleIndex()
var visits []visitRecord
// Visit all directories in the repository.
// NOTE: the callback appends to visits and mutates ruleIndex; the later
// resolution passes rely on this accumulated state.
packages.Walk(uc.c, uc.c.RepoRoot, func(dir, rel string, c *config.Config, pkg *packages.Package, file *bf.File, isUpdateDir bool) {
// If this file is ignored or if Gazelle was not asked to update this
// directory, just index the build file and move on.
if !isUpdateDir {
if file != nil {
ruleIndex.AddRulesFromFile(c, file)
}
return
}
// Fix any problems in the file.
if file != nil {
file = merger.FixFileMinor(c, file)
fixedFile := merger.FixFile(c, file)
if cmd == fixCmd {
file = fixedFile
} else if fixedFile != file {
log.Printf("%s: warning: file contains rules whose structure is out of date. Consider running 'gazelle fix'.", file.Path)
}
}
// If the file exists, but no Go code is present, create an empty package.
// This lets us delete existing rules.
if pkg == nil && file != nil {
pkg = packages.EmptyPackage(c, dir, rel)
}
// Generate new rules and merge them into the existing file (if present).
if pkg != nil {
g := rules.NewGenerator(c, l, file)
rules, empty, err := g.GenerateRules(pkg)
if err != nil {
log.Print(err)
return
}
if file == nil {
file = &bf.File{
Path: filepath.Join(c.RepoRoot, filepath.FromSlash(rel), c.DefaultBuildFileName()),
Stmt: rules,
}
} else {
file, rules = merger.MergeFile(rules, empty, file, merger.PreResolveAttrs)
}
visits = append(visits, visitRecord{
pkgRel: rel,
rules: rules,
empty: empty,
file: file,
})
}
// Add library rules to the dependency resolution table.
if file != nil {
ruleIndex.AddRulesFromFile(c, file)
}
})
// Finish building the index for dependency resolution.
ruleIndex.Finish()
// Resolve dependencies.
rc := repos.NewRemoteCache(uc.repos)
resolver := resolve.NewResolver(uc.c, l, ruleIndex, rc)
for i := range visits {
for j := range visits[i].rules {
visits[i].rules[j] = resolver.ResolveRule(visits[i].rules[j], visits[i].pkgRel)
}
visits[i].file, _ = merger.MergeFile(visits[i].rules, visits[i].empty, visits[i].file, merger.PostResolveAttrs)
}
// Emit merged files.
// Emit errors are logged but do not abort the remaining files.
for _, v := range visits {
rules.SortLabels(v.file)
v.file = merger.FixLoads(v.file)
bf.Rewrite(v.file, nil) // have buildifier 'format' our rules.
path := v.file.Path
if uc.outDir != "" {
stem := filepath.Base(v.file.Path) + uc.outSuffix
path = filepath.Join(uc.outDir, v.pkgRel, stem)
}
if err := uc.emit(uc.c, v.file, path); err != nil {
log.Print(err)
}
}
return nil
}
// newFixUpdateConfiguration parses command-line flags for the fix and update
// commands and builds an updateConfig from them. It validates the target
// directories against the repository root, determines the Go prefix, the
// dependency/proto modes, and the output mode, and loads the repository list
// from the WORKSPACE file. On -h/-help it prints usage and exits the process.
func newFixUpdateConfiguration(cmd command, args []string) (*updateConfig, error) {
uc := &updateConfig{c: &config.Config{}}
var err error
fs := flag.NewFlagSet("gazelle", flag.ContinueOnError)
// Flag will call this on any parse error. Don't print usage unless
// -h or -help were passed explicitly.
fs.Usage = func() {}
knownImports := multiFlag{}
buildFileName := fs.String("build_file_name", "BUILD.bazel,BUILD", "comma-separated list of valid build file names.\nThe first element of the list is the name of output build files to generate.")
buildTags := fs.String("build_tags", "", "comma-separated list of build tags. If not specified, Gazelle will not\n\tfilter sources with build constraints.")
external := fs.String("external", "external", "external: resolve external packages with go_repository\n\tvendored: resolve external packages as packages in vendor/")
var goPrefix explicitFlag
fs.Var(&goPrefix, "go_prefix", "prefix of import paths in the current workspace")
repoRoot := fs.String("repo_root", "", "path to a directory which corresponds to go_prefix, otherwise gazelle searches for it.")
fs.Var(&knownImports, "known_import", "import path for which external resolution is skipped (can specify multiple times)")
mode := fs.String("mode", "fix", "print: prints all of the updated BUILD files\n\tfix: rewrites all of the BUILD files in place\n\tdiff: computes the rewrite but then just does a diff")
outDir := fs.String("experimental_out_dir", "", "write build files to an alternate directory tree")
outSuffix := fs.String("experimental_out_suffix", "", "extra suffix appended to build file names. Only used if -experimental_out_dir is also set.")
var proto explicitFlag
fs.Var(&proto, "proto", "default: generates new proto rules\n\tdisable: does not touch proto rules\n\tlegacy (deprecated): generates old proto rules")
if err := fs.Parse(args); err != nil {
if err == flag.ErrHelp {
fixUpdateUsage(fs)
os.Exit(0)
}
// flag already prints the error; don't print it again.
log.Fatal("Try -help for more information.")
}
// Remaining arguments are the directories to process; default to cwd.
uc.c.Dirs = fs.Args()
if len(uc.c.Dirs) == 0 {
uc.c.Dirs = []string{"."}
}
for i := range uc.c.Dirs {
uc.c.Dirs[i], err = filepath.Abs(uc.c.Dirs[i])
if err != nil {
return nil, err
}
}
// Determine the repository root: explicit flag, or search upward for
// a WORKSPACE file from the (single) target dir or the cwd.
if *repoRoot != "" {
uc.c.RepoRoot = *repoRoot
} else if len(uc.c.Dirs) == 1 {
uc.c.RepoRoot, err = wspace.Find(uc.c.Dirs[0])
if err != nil {
return nil, fmt.Errorf("-repo_root not specified, and WORKSPACE cannot be found: %v", err)
}
} else {
uc.c.RepoRoot, err = wspace.Find(".")
if err != nil {
return nil, fmt.Errorf("-repo_root not specified, and WORKSPACE cannot be found: %v", err)
}
}
uc.c.RepoRoot, err = filepath.EvalSymlinks(uc.c.RepoRoot)
if err != nil {
return nil, fmt.Errorf("failed to evaluate symlinks for repo root: %v", err)
}
// Every target directory must live under the repository root.
for _, dir := range uc.c.Dirs {
if !isDescendingDir(dir, uc.c.RepoRoot) {
return nil, fmt.Errorf("dir %q is not a subdirectory of repo root %q", dir, uc.c.RepoRoot)
}
}
uc.c.ValidBuildFileNames = strings.Split(*buildFileName, ",")
if len(uc.c.ValidBuildFileNames) == 0 {
return nil, fmt.Errorf("no valid build file names specified")
}
uc.c.SetBuildTags(*buildTags)
uc.c.PreprocessTags()
// Go prefix: from the flag if given, otherwise from the root build file.
if goPrefix.set {
uc.c.GoPrefix = goPrefix.value
} else {
uc.c.GoPrefix, err = loadGoPrefix(uc.c)
if err != nil {
return nil, err
}
}
if err := config.CheckPrefix(uc.c.GoPrefix); err != nil {
return nil, err
}
uc.c.ShouldFix = cmd == fixCmd
uc.c.DepMode, err = config.DependencyModeFromString(*external)
if err != nil {
return nil, err
}
if proto.set {
uc.c.ProtoMode, err = config.ProtoModeFromString(proto.value)
if err != nil {
return nil, err
}
uc.c.ProtoModeExplicit = true
}
emit, ok := modeFromName[*mode]
if !ok {
return nil, fmt.Errorf("unrecognized emit mode: %q", *mode)
}
uc.emit = emit
uc.outDir = *outDir
uc.outSuffix = *outSuffix
// Load the repository list from WORKSPACE; a missing file is treated as
// an empty workspace rather than an error.
workspacePath := filepath.Join(uc.c.RepoRoot, "WORKSPACE")
workspaceContent, err := ioutil.ReadFile(workspacePath)
if os.IsNotExist(err) {
workspaceContent = nil
} else if err != nil {
return nil, err
}
workspace, err := bf.Parse(workspacePath, workspaceContent)
if err != nil {
return nil, err
}
uc.repos = repos.ListRepositories(workspace)
// Add synthetic repos for -known_import paths not already listed.
repoPrefixes := make(map[string]bool)
for _, r := range uc.repos {
repoPrefixes[r.GoPrefix] = true
}
for _, imp := range knownImports {
if repoPrefixes[imp] {
continue
}
repo := repos.Repo{
Name: label.ImportPathToBazelRepoName(imp),
GoPrefix: imp,
}
uc.repos = append(uc.repos, repo)
}
return uc, nil
}
func fixUpdateUsage(fs *flag.FlagSet) {
fmt.Fprintln(os.Stderr, `usage: gazelle [fix|update] [flags...] [package-dirs...]
The update command creates new build files and update existing BUILD files
when needed.
The fix command also creates and updates build files, and in addition, it may
make potentially breaking updates to usage of rules. For example, it may
delete obsolete rules or rename existing rules.
There are several output modes which can be selected with the -mode flag. The
output mode determines what Gazelle does with updated BUILD files.
fix (default) - write updated BUILD files back to disk.
print - print updated BUILD files to stdout.
diff - diff updated BUILD files against existing files in unified format.
Gazelle accepts a list of paths to Go package directories to process (defaults
to the working directory if none are given). It recursively traverses
subdirectories. All directories must be under the directory specified by
-repo_root; if -repo_root is not given, this is the directory containing the
WORKSPACE file.
FLAGS:
`)
fs.PrintDefaults()
}
// loadBuildFile reads and parses the build file in dir. The first entry of
// c.ValidBuildFileNames that exists in dir as a regular file wins. If no
// build file is found, os.ErrNotExist is returned.
func loadBuildFile(c *config.Config, dir string) (*bf.File, error) {
	var buildPath string
	for _, base := range c.ValidBuildFileNames {
		candidate := filepath.Join(dir, base)
		info, err := os.Stat(candidate)
		if err != nil {
			if os.IsNotExist(err) {
				continue
			}
			return nil, err
		}
		// Skip non-regular entries (e.g. a directory named BUILD).
		if info.Mode().IsRegular() {
			buildPath = candidate
			break
		}
	}
	if buildPath == "" {
		return nil, os.ErrNotExist
	}
	data, err := ioutil.ReadFile(buildPath)
	if err != nil {
		return nil, err
	}
	return bf.Parse(buildPath, data)
}
// loadGoPrefix determines the repository's Go import-path prefix from the
// root build file: a "# gazelle:prefix" directive takes precedence over a
// legacy go_prefix() call. If neither is found, an error mentioning the
// -go_prefix flag is returned.
func loadGoPrefix(c *config.Config) (string, error) {
	f, err := loadBuildFile(c, c.RepoRoot)
	if err != nil {
		return "", errors.New("-go_prefix not set")
	}
	// Prefer the directive form.
	for _, d := range config.ParseDirectives(f) {
		if d.Key == "prefix" {
			return d.Value, nil
		}
	}
	// Fall back to a legacy go_prefix("...") call.
	for _, stmt := range f.Stmt {
		call, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		fn, ok := call.X.(*bf.LiteralExpr)
		if !ok || fn.Token != "go_prefix" {
			continue
		}
		if len(call.List) != 1 {
			return "", fmt.Errorf("-go_prefix not set, and %s has go_prefix(%v) with too many args", f.Path, call.List)
		}
		arg, ok := call.List[0].(*bf.StringExpr)
		if !ok {
			return "", fmt.Errorf("-go_prefix not set, and %s has go_prefix(%v) which is not a string", f.Path, bf.FormatString(call.List[0]))
		}
		return arg.Value, nil
	}
	return "", fmt.Errorf("-go_prefix not set, and no # gazelle:prefix directive found in %s", f.Path)
}
// isDescendingDir reports whether dir is root itself or lies inside root.
// The comparison is purely lexical, via filepath.Rel; symlinks are not
// resolved here (callers canonicalize RepoRoot beforehand).
func isDescendingDir(dir, root string) bool {
	rel, err := filepath.Rel(root, dir)
	if err != nil {
		return false
	}
	if rel == "." {
		return true
	}
	// A relative path escapes root only when it is ".." itself or begins
	// with "../". Checking for the separator avoids misclassifying valid
	// subdirectories whose names merely start with "..", e.g. "..foo",
	// which the previous bare HasPrefix(rel, "..") check rejected.
	return rel != ".." && !strings.HasPrefix(rel, ".."+string(filepath.Separator))
}

View File

@ -0,0 +1,35 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"io/ioutil"
"os"
"path/filepath"
"github.com/bazelbuild/bazel-gazelle/internal/config"
bf "github.com/bazelbuild/buildtools/build"
)
// fixFile formats file and writes it to path, creating any missing parent
// directories first.
func fixFile(c *config.Config, file *bf.File, path string) error {
	if err := os.MkdirAll(filepath.Dir(path), 0777); err != nil {
		return err
	}
	return ioutil.WriteFile(path, bf.Format(file), 0666)
}

View File

@ -0,0 +1,51 @@
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import "fmt"
// multiFlag is a flag.Value that collects every occurrence of a repeated
// string flag into a slice.
type multiFlag []string

// String implements flag.Value. It returns "" for an empty flag and the
// fmt representation of the collected values otherwise.
func (m *multiFlag) String() string {
	if len(*m) > 0 {
		return fmt.Sprint(*m)
	}
	return ""
}

// Set implements flag.Value by appending each occurrence of the flag.
func (m *multiFlag) Set(v string) error {
	*m = append(*m, v)
	return nil
}
// explicitFlag is a flag.Value for a string flag that also records whether
// the flag appeared on the command line at all, so callers can distinguish
// "unset" from "set to the empty string".
type explicitFlag struct {
	set   bool
	value string
}

// Set records the value and marks the flag as explicitly provided.
func (f *explicitFlag) Set(value string) error {
	f.value = value
	f.set = true
	return nil
}

// String returns the current value. It is safe on a nil receiver, which
// the flag package may use when printing defaults.
func (f *explicitFlag) String() string {
	if f == nil {
		return ""
	}
	return f.value
}

View File

@ -0,0 +1,106 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Command gazelle is a BUILD file generator for Go projects.
// See "gazelle --help" for more details.
package main
import (
"fmt"
"log"
"os"
)
// command enumerates the gazelle subcommands.
type command int

const (
	updateCmd      command = iota // default: create or update build files
	fixCmd                        // update, plus potentially breaking cleanups
	updateReposCmd                // rewrite repository rules in WORKSPACE
	helpCmd                       // print top-level usage
)

// commandFromName maps the first command-line argument to a command.
// Anything not listed here falls through to the default (update).
var commandFromName = map[string]command{
	"fix":          fixCmd,
	"help":         helpCmd,
	"update":       updateCmd,
	"update-repos": updateReposCmd,
}
// main is the gazelle entry point: configure the logger, run the selected
// subcommand, and exit non-zero on error.
func main() {
	// Plain messages: no timestamps, prefixed with the program name.
	log.SetFlags(0)
	log.SetPrefix("gazelle: ")
	if err := run(os.Args[1:]); err != nil {
		log.Fatal(err)
	}
}
// run dispatches to the subcommand named by the first argument. When the
// first argument is not a recognized command name, the update command is
// assumed and all arguments are passed through. A lone -h/-help/--help is
// treated as the help command.
func run(args []string) error {
	cmd := updateCmd
	switch {
	case len(args) == 1 && (args[0] == "-h" || args[0] == "-help" || args[0] == "--help"):
		cmd = helpCmd
	case len(args) > 0:
		if c, ok := commandFromName[args[0]]; ok {
			cmd = c
			args = args[1:]
		}
	}
	switch cmd {
	case fixCmd, updateCmd:
		return runFixUpdate(cmd, args)
	case updateReposCmd:
		return updateRepos(args)
	case helpCmd:
		help()
		return nil
	default:
		log.Panicf("unknown command: %v", cmd)
		return nil // unreachable; Panicf always panics
	}
}
// help prints top-level usage information for gazelle to stderr.
// (Fixes the "delevopment" typo in the user-visible text.)
func help() {
	fmt.Fprint(os.Stderr, `usage: gazelle <command> [args...]
Gazelle is a BUILD file generator for Go projects. It can create new BUILD files
for a project that follows "go build" conventions, and it can update BUILD files
if they already exist. It can be invoked directly in a project workspace, or
it can be run on an external dependency during the build as part of the
go_repository rule.
Gazelle may be run with one of the commands below. If no command is given,
Gazelle defaults to "update".
update - Gazelle will create new BUILD files or update existing BUILD files
if needed.
fix - in addition to the changes made in update, Gazelle will make potentially
breaking changes. For example, it may delete obsolete rules or rename
existing rules.
update-repos - updates repository rules in the WORKSPACE file. Run with
-h for details.
help - show this message.
For usage information for a specific command, run the command with the -h flag.
For example:
gazelle update -h
Gazelle is under active development, and its interface may change
without notice.
`)
}

View File

@ -0,0 +1,28 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"os"
"github.com/bazelbuild/bazel-gazelle/internal/config"
bf "github.com/bazelbuild/buildtools/build"
)
func printFile(c *config.Config, f *bf.File, _ string) error {
_, err := os.Stdout.Write(bf.Format(f))
return err
}

View File

@ -0,0 +1,176 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"errors"
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"sync"
"github.com/bazelbuild/bazel-gazelle/internal/merger"
"github.com/bazelbuild/bazel-gazelle/internal/repos"
"github.com/bazelbuild/bazel-gazelle/internal/wspace"
bf "github.com/bazelbuild/buildtools/build"
)
// updateReposFn computes the updated WORKSPACE content: given the parsed
// old WORKSPACE file, it returns a file with merged repository rules.
type updateReposFn func(c *updateReposConfiguration, oldFile *bf.File) (*bf.File, error)

// updateReposConfiguration holds the parsed command-line state for the
// update-repos command.
type updateReposConfiguration struct {
	fn           updateReposFn // strategy: updateImportPaths or importFromLockFile
	repoRoot     string        // root directory containing WORKSPACE
	lockFilename string        // lock file path, set via -from_file
	importPaths  []string      // positional import-path arguments
}
// updateRepos implements the update-repos command: it rewrites repository
// rules in the WORKSPACE file, either from explicit import paths or from a
// vendoring tool's lock file, and writes the merged file back to disk.
func updateRepos(args []string) error {
	conf, err := newUpdateReposConfiguration(args)
	if err != nil {
		return err
	}
	workspacePath := filepath.Join(conf.repoRoot, "WORKSPACE")
	content, err := ioutil.ReadFile(workspacePath)
	if err != nil {
		return fmt.Errorf("error reading %q: %v", workspacePath, err)
	}
	oldFile, err := bf.Parse(workspacePath, content)
	if err != nil {
		return fmt.Errorf("error parsing %q: %v", workspacePath, err)
	}
	merged, err := conf.fn(conf, oldFile)
	if err != nil {
		return err
	}
	// Make sure load() statements match the rules actually used.
	merged = merger.FixLoads(merged)
	if err := ioutil.WriteFile(merged.Path, bf.Format(merged), 0666); err != nil {
		return fmt.Errorf("error writing %q: %v", merged.Path, err)
	}
	return nil
}
// newUpdateReposConfiguration parses command-line arguments for the
// update-repos command. On -h/-help it prints usage and exits the process.
func newUpdateReposConfiguration(args []string) (*updateReposConfiguration, error) {
	c := new(updateReposConfiguration)
	fs := flag.NewFlagSet("gazelle", flag.ContinueOnError)
	// The flag package calls Usage on any parse error. Suppress it so usage
	// is only shown when -h or -help is passed explicitly.
	fs.Usage = func() {}
	fromFileFlag := fs.String("from_file", "", "Gazelle will translate repositories listed in this file into repository rules in WORKSPACE. Currently only dep's Gopkg.lock is supported.")
	repoRootFlag := fs.String("repo_root", "", "path to the root directory of the repository. If unspecified, this is assumed to be the directory containing WORKSPACE.")
	if err := fs.Parse(args); err != nil {
		if err == flag.ErrHelp {
			updateReposUsage(fs)
			os.Exit(0)
		}
		// The flag package already printed the error; don't repeat it.
		return nil, errors.New("Try -help for more information")
	}

	// General flags shared by all subcommands.
	c.repoRoot = *repoRootFlag
	if c.repoRoot == "" {
		repoRoot, err := wspace.Find(".")
		if err != nil {
			return nil, err
		}
		c.repoRoot = repoRoot
	}

	// Subcommand-specific behavior.
	if *fromFileFlag != "" {
		if len(fs.Args()) != 0 {
			return nil, fmt.Errorf("Got %d positional arguments with -from_file; wanted 0.\nTry -help for more information.", len(fs.Args()))
		}
		c.fn = importFromLockFile
		c.lockFilename = *fromFileFlag
	} else {
		if len(fs.Args()) == 0 {
			return nil, fmt.Errorf("No repositories specified\nTry -help for more information.")
		}
		c.fn = updateImportPaths
		c.importPaths = fs.Args()
	}
	return c, nil
}
func updateReposUsage(fs *flag.FlagSet) {
fmt.Fprintln(os.Stderr, `usage:
# Add/update repositories by import path
gazelle update-repos example.com/repo1 example.com/repo2
# Import repositories from lock file
gazelle update-repos -from_file=file
The update-repos command updates repository rules in the WORKSPACE file.
update-repos can add or update repositories explicitly by import path.
update-repos can also import repository rules from a vendoring tool's lock
file (currently only deps' Gopkg.lock is supported).
FLAGS:
`)
}
// updateImportPaths generates a repository rule for each import path given
// on the command line and merges them into oldFile. Lookups run
// concurrently; the first error (in argument order) is returned.
func updateImportPaths(c *updateReposConfiguration, oldFile *bf.File) (*bf.File, error) {
	rs := repos.ListRepositories(oldFile)
	rc := repos.NewRemoteCache(rs)
	// genRules and errs are indexed per goroutine, so each worker writes
	// only to its own slot and no locking is needed.
	genRules := make([]bf.Expr, len(c.importPaths))
	errs := make([]error, len(c.importPaths))
	var wg sync.WaitGroup
	wg.Add(len(c.importPaths))
	for i, imp := range c.importPaths {
		// Fix: imp must be passed as an argument. The original closure
		// captured the shared loop variable, so goroutines racing with the
		// loop could all observe the last import path (pre-Go 1.22
		// for-range semantics).
		go func(i int, imp string) {
			defer wg.Done()
			repo, err := repos.UpdateRepo(rc, imp)
			if err != nil {
				errs[i] = err
				return
			}
			repo.Remote = "" // don't set these explicitly
			repo.VCS = ""
			genRules[i] = repos.GenerateRule(repo)
		}(i, imp)
	}
	wg.Wait()
	for _, err := range errs {
		if err != nil {
			return nil, err
		}
	}
	mergedFile, _ := merger.MergeFile(genRules, nil, oldFile, merger.RepoAttrs)
	return mergedFile, nil
}
// importFromLockFile generates repository rules from a vendoring tool's
// lock file (c.lockFilename) and merges them into oldFile.
func importFromLockFile(c *updateReposConfiguration, oldFile *bf.File) (*bf.File, error) {
	rules, err := repos.ImportRepoRules(c.lockFilename)
	if err != nil {
		return nil, err
	}
	merged, _ := merger.MergeFile(rules, nil, oldFile, merger.RepoAttrs)
	return merged, nil
}

View File

@ -0,0 +1,65 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"io/ioutil"
"log"
"path/filepath"
"regexp"
"github.com/bazelbuild/bazel-gazelle/internal/config"
"github.com/bazelbuild/bazel-gazelle/internal/repos"
"github.com/bazelbuild/bazel-gazelle/internal/version"
)
// minimumRulesGoVersion is the oldest release of rules_go this Gazelle is
// known to be compatible with.
var minimumRulesGoVersion = version.Version{0, 9, 0}

// checkRulesGoVersion checks whether a compatible version of rules_go is
// being used in the workspace. A message will be logged if an incompatible
// version is found.
//
// Note that we can't always determine the version of rules_go in use. Also,
// if we find an incompatible version, we shouldn't bail out since the
// incompatibility may not matter in the current workspace.
func checkRulesGoVersion(repoRoot string) {
	const message = `Gazelle may not be compatible with this version of rules_go.
Update io_bazel_rules_go to a newer version in your WORKSPACE file.`
	rulesGoPath, err := repos.FindExternalRepo(repoRoot, config.RulesGoRepoName)
	if err != nil {
		// rules_go may not have been fetched yet; nothing to check.
		return
	}
	defBzlPath := filepath.Join(rulesGoPath, "go", "def.bzl")
	defBzlContent, err := ioutil.ReadFile(defBzlPath)
	if err != nil {
		return
	}
	versionRe := regexp.MustCompile(`(?m)^RULES_GO_VERSION = ['"]([0-9.]*)['"]`)
	match := versionRe.FindSubmatch(defBzlContent)
	if match == nil {
		log.Printf("RULES_GO_VERSION not found in @%s//go:def.bzl.\n%s", config.RulesGoRepoName, message)
		return
	}
	vstr := string(match[1])
	v, err := version.ParseVersion(vstr)
	if err != nil {
		log.Printf("RULES_GO_VERSION %q could not be parsed in @%s//go:def.bzl.\n%s", vstr, config.RulesGoRepoName, message)
		// Fix: return here. Without it, v is the zero version and the
		// comparison below logged a spurious incompatibility message on
		// top of the parse failure.
		return
	}
	if v.Compare(minimumRulesGoVersion) < 0 {
		log.Printf("Found RULES_GO_VERSION %s. Minimum compatible version is %s.\n%s", v, minimumRulesGoVersion, message)
	}
}

View File

@ -0,0 +1,28 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

# Go library for the vendored bazel-gazelle internal config package.
go_library(
    name = "go_default_library",
    srcs = [
        "config.go",
        "constants.go",
        "directives.go",
        "platform.go",
    ],
    importpath = "github.com/bazelbuild/bazel-gazelle/internal/config",
    visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
    deps = ["//vendor/github.com/bazelbuild/buildtools/build:go_default_library"],
)

# NOTE(review): the "automanaged" tags below suggest these filegroups are
# maintained by a tool — confirm which tool regenerates them before editing
# by hand.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,175 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
import (
"fmt"
"go/build"
"strings"
)
// Config holds information about how Gazelle should run. This is mostly
// based on command-line arguments.
type Config struct {
	// Dirs is a list of absolute paths to directories where Gazelle should run.
	Dirs []string

	// RepoRoot is the absolute, canonical path to the root directory of the
	// repository with all symlinks resolved.
	RepoRoot string

	// ValidBuildFileNames is a list of base names that are considered valid
	// build files. Some repositories may have files named "BUILD" that are not
	// used by Bazel and should be ignored. Must contain at least one string.
	ValidBuildFileNames []string

	// GenericTags is a set of build constraints that are true on all platforms.
	// It should not be nil (call SetBuildTags / PreprocessTags to populate it).
	GenericTags BuildTags

	// GoPrefix is the portion of the import path for the root of this repository.
	// This is used to map imports to labels within the repository.
	GoPrefix string

	// GoPrefixRel is the slash-separated path to the directory where GoPrefix
	// was set, relative to the repository root. "" for the repository root.
	GoPrefixRel string

	// ShouldFix determines whether Gazelle attempts to remove and replace
	// usage of deprecated rules.
	ShouldFix bool

	// DepMode determines how imports outside of GoPrefix are resolved.
	DepMode DependencyMode

	// ProtoMode determines how rules are generated for protos.
	ProtoMode ProtoMode

	// ProtoModeExplicit indicates whether the proto mode was set explicitly
	// (on the command line or via a directive), as opposed to inferred.
	ProtoModeExplicit bool
}
// DefaultValidBuildFileNames is the list of build file base names Gazelle
// recognizes when no explicit -build_file_name is given.
var DefaultValidBuildFileNames = []string{"BUILD.bazel", "BUILD"}

// IsValidBuildFileName reports whether name is one of the configured build
// file base names.
func (c *Config) IsValidBuildFileName(name string) bool {
	for _, valid := range c.ValidBuildFileNames {
		if valid == name {
			return true
		}
	}
	return false
}

// DefaultBuildFileName returns the preferred build file name: the first
// entry of ValidBuildFileNames.
func (c *Config) DefaultBuildFileName() string {
	return c.ValidBuildFileNames[0]
}
// BuildTags is a set of build constraints.
type BuildTags map[string]bool

// SetBuildTags populates GenericTags from a comma-separated list. Negated
// tags (which "go build" would not recognize here) produce an error.
// PreprocessTags should be called afterwards to add the implicit defaults.
func (c *Config) SetBuildTags(tags string) error {
	c.GenericTags = make(BuildTags)
	if tags == "" {
		return nil
	}
	for _, tag := range strings.Split(tags, ",") {
		if strings.HasPrefix(tag, "!") {
			return fmt.Errorf("build tags can't be negated: %s", tag)
		}
		c.GenericTags[tag] = true
	}
	return nil
}

// PreprocessTags adds tags that are considered true by default before
// GenericTags is used to match files. It also makes GenericTags safe to
// use when SetBuildTags was never called.
func (c *Config) PreprocessTags() {
	if c.GenericTags == nil {
		c.GenericTags = make(BuildTags)
	}
	c.GenericTags["gc"] = true
}
// CheckPrefix validates a string for use as an import-path prefix.
// Paths beginning with "/" and local (relative) import forms are rejected.
// The empty string is allowed, but generated rules must not end up with an
// empty importpath.
func CheckPrefix(prefix string) error {
	switch {
	case strings.HasPrefix(prefix, "/"), build.IsLocalImport(prefix):
		return fmt.Errorf("invalid prefix: %q", prefix)
	}
	return nil
}
// DependencyMode determines how imports of packages outside of the prefix
// are resolved.
type DependencyMode int

const (
	// ExternalMode resolves out-of-prefix imports to external repositories
	// declared in WORKSPACE.
	ExternalMode DependencyMode = iota
	// VendorMode resolves out-of-prefix imports to libraries under the
	// vendor directory.
	VendorMode
)

// DependencyModeFromString converts a command-line string into a
// DependencyMode. The accepted values are "external" and "vendored";
// anything else yields an error.
func DependencyModeFromString(s string) (DependencyMode, error) {
	switch s {
	case "external":
		return ExternalMode, nil
	case "vendored":
		return VendorMode, nil
	}
	return 0, fmt.Errorf("unrecognized dependency mode: %q", s)
}
// ProtoMode determines how proto rules are generated.
type ProtoMode int

const (
	// DefaultProtoMode generates proto_library and new grpc_proto_library
	// rules. .pb.go files are excluded when there is a .proto file with a
	// similar name.
	DefaultProtoMode ProtoMode = iota
	// DisableProtoMode ignores .proto files; .pb.go files are treated as
	// ordinary Go sources.
	DisableProtoMode
	// LegacyProtoMode generates filegroups for .proto files if .pb.go files
	// are present in the same directory.
	LegacyProtoMode
)

// ProtoModeFromString converts a command-line string ("default",
// "disable", or "legacy") into a ProtoMode.
func ProtoModeFromString(s string) (ProtoMode, error) {
	modes := map[string]ProtoMode{
		"default": DefaultProtoMode,
		"disable": DisableProtoMode,
		"legacy":  LegacyProtoMode,
	}
	if m, ok := modes[s]; ok {
		return m, nil
	}
	return 0, fmt.Errorf("unrecognized proto mode: %q", s)
}

View File

@ -0,0 +1,78 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
const (
	// RulesGoRepoName is the canonical name of the rules_go repository. It must
	// match the workspace name in WORKSPACE.
	RulesGoRepoName = "io_bazel_rules_go"
	// DefaultLibName is the name of the default go_library rule in a Go
	// package directory. It must be consistent with DEFAULT_LIB in
	// go/private/common.bf.
	DefaultLibName = "go_default_library"
	// DefaultTestName is the name of an internal test corresponding to
	// DefaultLibName. It does not need to match anything externally; it
	// just needs to be unique within the Bazel package.
	DefaultTestName = "go_default_test"
	// DefaultXTestName is the name of an external test corresponding to
	// DefaultLibName.
	DefaultXTestName = "go_default_xtest"
	// DefaultProtosName is the name of a filegroup created
	// whenever the library contains .pb.go files.
	DefaultProtosName = "go_default_library_protos"
	// DefaultCgoLibName is the name of the default cgo_library rule in a Go package directory.
	DefaultCgoLibName = "cgo_default_library"
	// GrpcCompilerLabel is the label for the gRPC compiler plugin, used in the
	// "compilers" attribute of go_proto_library rules.
	GrpcCompilerLabel = "@io_bazel_rules_go//proto:go_grpc"
	// WellKnownTypesProtoRepo is the repository containing proto_library rules
	// for the Well Known Types.
	WellKnownTypesProtoRepo = "com_google_protobuf"
	// WellKnownTypesGoProtoRepo is the repository containing go_library rules
	// for the Well Known Types.
	WellKnownTypesGoProtoRepo = "com_github_golang_protobuf"
	// WellKnownTypesGoPrefix is the import path for the Go repository containing
	// pre-generated code for the Well Known Types.
	WellKnownTypesGoPrefix = "github.com/golang/protobuf"
	// GazelleImportsKey is an internal attribute that lists imported packages
	// on generated rules. It is replaced with "deps" during import resolution.
	GazelleImportsKey = "_gazelle_imports"
)
// Language identifies a programming language that Gazelle knows about.
// It is used when specifying import paths.
type Language int

const (
	// GoLang marks Go targets.
	GoLang Language = iota
	// ProtoLang marks protocol buffer targets.
	ProtoLang
)

// String returns the lowercase name of the language, or "unknown" for
// values outside the declared set.
func (l Language) String() string {
	if l == GoLang {
		return "go"
	}
	if l == ProtoLang {
		return "proto"
	}
	return "unknown"
}

View File

@ -0,0 +1,203 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
import (
"log"
"path"
"regexp"
"strings"
bf "github.com/bazelbuild/buildtools/build"
)
// Directive is a key-value pair extracted from a top-level comment in
// a build file. Directives have the following format:
//
//     # gazelle:key value
//
// Keys may not contain spaces. Values may be empty and may contain spaces,
// but surrounding space is trimmed.
type Directive struct {
	Key, Value string
}

// knownTopLevelDirectives is the set of directive keys recognized at the
// top level. Top-level directives apply to the whole package or build
// file; they must appear before the first statement.
var knownTopLevelDirectives = map[string]bool{
	"build_file_name": true,
	"build_tags":      true,
	"exclude":         true,
	"prefix":          true,
	"ignore":          true,
	"proto":           true,
}

// TODO(jayconrod): annotation directives will apply to an individual rule.
// They must appear in the block of comments above that rule.
// ParseDirectives scans f for Gazelle directives and returns the full
// list. Comments carrying an unrecognized directive key are logged and
// skipped.
func ParseDirectives(f *bf.File) []Directive {
	var found []Directive
	record := func(com bf.Comment) {
		m := directiveRe.FindStringSubmatch(com.Token)
		if m == nil {
			return
		}
		key, value := m[1], m[2]
		// All map values are true, so a plain lookup doubles as the
		// membership test.
		if !knownTopLevelDirectives[key] {
			log.Printf("%s:%d: unknown directive: %s", f.Path, com.Start.Line, com.Token)
			return
		}
		found = append(found, Directive{key, value})
	}
	for _, stmt := range f.Stmt {
		comments := stmt.Comment()
		for _, com := range comments.Before {
			record(com)
		}
		for _, com := range comments.After {
			record(com)
		}
	}
	return found
}

// directiveRe matches comments of the form "# gazelle:key value",
// capturing the key and the space-trimmed value.
var directiveRe = regexp.MustCompile(`^#\s*gazelle:(\w+)\s*(.*?)\s*$`)
// ApplyDirectives applies directives that modify the configuration to a copy of
// c, which is returned. If there are no configuration directives, c is returned
// unmodified (no copy escapes).
func ApplyDirectives(c *Config, directives []Directive, rel string) *Config {
	// Copy-on-write: mutate a stack copy and only return its address if
	// something actually changed.
	modified := *c
	didModify := false
	for _, d := range directives {
		switch d.Key {
		case "build_tags":
			if err := modified.SetBuildTags(d.Value); err != nil {
				// Bad tag list: log it and restore the original tag set,
				// since SetBuildTags already clobbered GenericTags.
				log.Print(err)
				modified.GenericTags = c.GenericTags
			} else {
				modified.PreprocessTags()
				didModify = true
			}
		case "build_file_name":
			modified.ValidBuildFileNames = strings.Split(d.Value, ",")
			didModify = true
		case "prefix":
			if err := CheckPrefix(d.Value); err != nil {
				log.Print(err)
				continue
			}
			modified.GoPrefix = d.Value
			// Remember where the prefix was set, relative to the repo root.
			modified.GoPrefixRel = rel
			didModify = true
		case "proto":
			protoMode, err := ProtoModeFromString(d.Value)
			if err != nil {
				log.Print(err)
				continue
			}
			modified.ProtoMode = protoMode
			modified.ProtoModeExplicit = true
			didModify = true
		}
	}
	if !didModify {
		return c
	}
	return &modified
}
// InferProtoMode sets Config.ProtoMode, based on the contents of f. If the
// proto mode is already set to something other than the default, or if the mode
// is set explicitly in directives, this function does not change it. If the
// legacy go_proto_library.bzl is loaded, or if this is the Well Known Types
// repository, legacy mode is used. If go_proto_library is loaded from another
// file, proto rule generation is disabled.
func InferProtoMode(c *Config, rel string, f *bf.File, directives []Directive) *Config {
	// Respect a mode that was already chosen, programmatically or explicitly.
	if c.ProtoMode != DefaultProtoMode || c.ProtoModeExplicit {
		return c
	}
	// A "proto" directive in this file wins; leave c untouched here.
	for _, d := range directives {
		if d.Key == "proto" {
			return c
		}
	}
	if c.GoPrefix == WellKnownTypesGoPrefix {
		// Use legacy mode in this repo. We don't need proto_library or
		// go_proto_library, since we get that from @com_google_protobuf.
		// Legacy rules still refer to .proto files in here, which are
		// exposed by filegroup. go_library rules from .pb.go files will be
		// generated, which are depended upon by the new rules.
		modified := *c
		modified.ProtoMode = LegacyProtoMode
		return &modified
	}
	// Vendored trees ship pre-generated .pb.go files; don't generate proto
	// rules there.
	if path.Base(rel) == "vendor" {
		modified := *c
		modified.ProtoMode = DisableProtoMode
		return &modified
	}
	if f == nil {
		return c
	}
	// Scan load() statements to see where go_proto_library comes from.
	mode := DefaultProtoMode
	for _, stmt := range f.Stmt {
		// NOTE: c is deliberately shadowed below; the outer Config is only
		// needed again after this loop.
		c, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		x, ok := c.X.(*bf.LiteralExpr)
		if !ok || x.Token != "load" || len(c.List) == 0 {
			continue
		}
		name, ok := c.List[0].(*bf.StringExpr)
		if !ok {
			continue
		}
		// Official proto def.bzl: keep the default mode, stop scanning.
		if name.Value == "@io_bazel_rules_go//proto:def.bzl" {
			break
		}
		// Legacy bzl file: switch to legacy mode.
		if name.Value == "@io_bazel_rules_go//proto:go_proto_library.bzl" {
			mode = LegacyProtoMode
			break
		}
		// go_proto_library loaded (plain or rebound via kwarg) from any
		// other file: disable proto rule generation.
		for _, arg := range c.List[1:] {
			if sym, ok := arg.(*bf.StringExpr); ok && sym.Value == "go_proto_library" {
				mode = DisableProtoMode
				break
			}
			kwarg, ok := arg.(*bf.BinaryExpr)
			if !ok || kwarg.Op != "=" {
				continue
			}
			if key, ok := kwarg.X.(*bf.LiteralExpr); ok && key.Token == "go_proto_library" {
				mode = DisableProtoMode
				break
			}
		}
	}
	// Return c unchanged when nothing effectively changes; fix mode ignores
	// a legacy finding because ShouldFix will migrate those rules anyway.
	if mode == DefaultProtoMode || c.ProtoMode == mode || c.ShouldFix && mode == LegacyProtoMode {
		return c
	}
	modified := *c
	modified.ProtoMode = mode
	return &modified
}

View File

@ -0,0 +1,128 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
import (
"sort"
)
// Platform represents a GOOS/GOARCH pair. When a Platform describes
// sources, dependencies, or flags, either OS or Arch may be empty.
type Platform struct {
	OS, Arch string
}

// String renders the platform as "OS_Arch" when both parts are set, the
// non-empty part when only one is set, and "" when neither is. The result
// must match the names of config_setting rules in
// @io_bazel_rules_go//go/platform.
func (p Platform) String() string {
	if p.OS == "" {
		return p.Arch
	}
	if p.Arch == "" {
		return p.OS
	}
	return p.OS + "_" + p.Arch
}
// KnownPlatforms is the set of target platforms that Go supports. Gazelle
// will generate multi-platform build files using these tags. rules_go and
// Bazel may not actually support all of these.
var KnownPlatforms = []Platform{
	{"android", "386"},
	{"android", "amd64"},
	{"android", "arm"},
	{"android", "arm64"},
	{"darwin", "386"},
	{"darwin", "amd64"},
	{"darwin", "arm"},
	{"darwin", "arm64"},
	{"dragonfly", "amd64"},
	{"freebsd", "386"},
	{"freebsd", "amd64"},
	{"freebsd", "arm"},
	{"linux", "386"},
	{"linux", "amd64"},
	{"linux", "arm"},
	{"linux", "arm64"},
	{"linux", "mips"},
	{"linux", "mips64"},
	{"linux", "mips64le"},
	{"linux", "mipsle"},
	{"linux", "ppc64"},
	{"linux", "ppc64le"},
	{"linux", "s390x"},
	{"nacl", "386"},
	{"nacl", "amd64p32"},
	{"nacl", "arm"},
	{"netbsd", "386"},
	{"netbsd", "amd64"},
	{"netbsd", "arm"},
	{"openbsd", "386"},
	{"openbsd", "amd64"},
	{"openbsd", "arm"},
	{"plan9", "386"},
	{"plan9", "amd64"},
	{"plan9", "arm"},
	{"solaris", "amd64"},
	{"windows", "386"},
	{"windows", "amd64"},
}

// The collections below are derived from KnownPlatforms in init().
var (
	// KnownOSs is the sorted list of operating systems that Go supports.
	KnownOSs []string
	// KnownOSSet is the set of operating systems that Go supports.
	KnownOSSet map[string]bool
	// KnownArchs is the sorted list of architectures that Go supports.
	KnownArchs []string
	// KnownArchSet is the set of architectures that Go supports.
	KnownArchSet map[string]bool
	// KnownOSArchs is a map from OS to the architectures they run on.
	KnownOSArchs map[string][]string
	// KnownArchOSs is a map from architectures to the OSs that run on them.
	KnownArchOSs map[string][]string
)
// init derives the OS/architecture lookup tables and sorted name lists
// from KnownPlatforms.
func init() {
	KnownOSSet = make(map[string]bool)
	KnownArchSet = make(map[string]bool)
	KnownOSArchs = make(map[string][]string)
	KnownArchOSs = make(map[string][]string)
	for _, plat := range KnownPlatforms {
		KnownOSSet[plat.OS] = true
		KnownArchSet[plat.Arch] = true
		KnownOSArchs[plat.OS] = append(KnownOSArchs[plat.OS], plat.Arch)
		KnownArchOSs[plat.Arch] = append(KnownArchOSs[plat.Arch], plat.OS)
	}
	KnownOSs = make([]string, 0, len(KnownOSSet))
	for osName := range KnownOSSet {
		KnownOSs = append(KnownOSs, osName)
	}
	sort.Strings(KnownOSs)
	KnownArchs = make([]string, 0, len(KnownArchSet))
	for arch := range KnownArchSet {
		KnownArchs = append(KnownArchs, arch)
	}
	sort.Strings(KnownArchs)
}

View File

@ -0,0 +1,29 @@
# Bazel build definitions for the vendored
# github.com/bazelbuild/bazel-gazelle/internal/label package.
# NOTE(review): the "automanaged" tags below suggest parts of this file are
# tool-maintained — confirm before editing by hand.
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = [
        "label.go",
        "labeler.go",
    ],
    importpath = "github.com/bazelbuild/bazel-gazelle/internal/label",
    visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
    deps = [
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
    ],
)

# Source filegroups: "package-srcs" collects this package's files;
# "all-srcs" re-exports them for repository-wide aggregation.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,163 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package label
import (
"fmt"
"log"
"path"
"regexp"
"strings"
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
)
// A Label represents a label of a build target in Bazel.
type Label struct {
	// Repo is the repository name (without the leading "@"); empty means the
	// current repository. Pkg is the slash-separated package path. Name is
	// the target name within the package.
	Repo, Pkg, Name string
	// Relative is true when the label was written without "//" or "@",
	// i.e. it is interpreted relative to the package it appears in.
	Relative bool
}

// New returns an absolute Label with the given repository, package, and
// target name.
func New(repo, pkg, name string) Label {
	return Label{Repo: repo, Pkg: pkg, Name: name}
}
// NoLabel is the nil value of Label. It is not a valid label and may be
// returned when an error occurs.
var NoLabel = Label{}

// Character-set validators for the three label components, used by Parse.
// See the Bazel lexical rules for labels.
var (
	labelRepoRegexp = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9_]*$`)
	labelPkgRegexp  = regexp.MustCompile(`^[A-Za-z0-9/._-]*$`)
	labelNameRegexp = regexp.MustCompile(`^[A-Za-z0-9_/.+=,@~-]*$`)
)
// Parse reads a label from a string.
// See https://docs.bazel.build/versions/master/build-ref.html#lexi.
func Parse(s string) (Label, error) {
	origStr := s
	relative := true
	var repo string
	if strings.HasPrefix(s, "@") {
		// Absolute form with an explicit repository: "@repo//pkg:name".
		relative = false
		endRepo := strings.Index(s, "//")
		if endRepo < 0 {
			return NoLabel, fmt.Errorf("label parse error: repository does not end with '//': %q", origStr)
		}
		repo = s[len("@"):endRepo]
		if !labelRepoRegexp.MatchString(repo) {
			return NoLabel, fmt.Errorf("label parse error: repository has invalid characters: %q", origStr)
		}
		s = s[endRepo:]
	}
	var pkg string
	if strings.HasPrefix(s, "//") {
		// Absolute package component: "//pkg" or "//pkg:name".
		relative = false
		endPkg := strings.Index(s, ":")
		if endPkg < 0 {
			pkg = s[len("//"):]
			s = ""
		} else {
			pkg = s[len("//"):endPkg]
			s = s[endPkg:]
		}
		if !labelPkgRegexp.MatchString(pkg) {
			return NoLabel, fmt.Errorf("label parse error: package has invalid characters: %q", origStr)
		}
	}
	if s == ":" {
		return NoLabel, fmt.Errorf("label parse error: empty name: %q", origStr)
	}
	name := strings.TrimPrefix(s, ":")
	if !labelNameRegexp.MatchString(name) {
		return NoLabel, fmt.Errorf("label parse error: name has invalid characters: %q", origStr)
	}
	if pkg == "" && name == "" {
		return NoLabel, fmt.Errorf("label parse error: empty package and name: %q", origStr)
	}
	if name == "" {
		// "//pkg" shorthand: the target name defaults to the package's base name.
		name = path.Base(pkg)
	}
	return Label{
		Repo:     repo,
		Pkg:      pkg,
		Name:     name,
		Relative: relative,
	}, nil
}
// String returns the canonical spelling of the label. Relative labels are
// printed as ":name"; when the target name equals the package's base name,
// the ":name" suffix is omitted (Bazel's shorthand convention).
func (l Label) String() string {
	if l.Relative {
		return ":" + l.Name
	}
	repo := ""
	if l.Repo != "" {
		repo = "@" + l.Repo
	}
	if path.Base(l.Pkg) == l.Name {
		return repo + "//" + l.Pkg
	}
	return repo + "//" + l.Pkg + ":" + l.Name
}
// Abs resolves a relative label against the given repository and package.
// Absolute labels are returned unchanged.
func (l Label) Abs(repo, pkg string) Label {
	if l.Relative {
		return Label{Repo: repo, Pkg: pkg, Name: l.Name}
	}
	return l
}
// Equal reports whether l and other are identical in every field.
// Label contains only comparable fields, so struct equality suffices.
func (l Label) Equal(other Label) bool {
	return l == other
}
// Contains returns whether other is contained by the package of l or a
// sub-package. Neither label may be relative.
func (l Label) Contains(other Label) bool {
	if l.Relative {
		log.Panicf("l must not be relative: %s", l)
	}
	if other.Relative {
		log.Panicf("other must not be relative: %s", other)
	}
	return l.Repo == other.Repo && pathtools.HasPrefix(other.Pkg, l.Pkg)
}
// ImportPathToBazelRepoName converts a Go import path into a bazel repo name
// following the guidelines in http://bazel.io/docs/be/functions.html#workspace
//
// The host component is reversed ("github.com" -> "com_github") and all
// components are joined with underscores; '-' and '.' become '_'.
func ImportPathToBazelRepoName(importpath string) string {
	importpath = strings.ToLower(importpath)
	components := strings.Split(importpath, "/")
	labels := strings.Split(components[0], ".")
	reversed := make([]string, 0, len(labels))
	for i := len(labels) - 1; i >= 0; i-- {
		reversed = append(reversed, labels[i])
	}
	repo := strings.Join(append(reversed, components[1:]...), "_")
	return strings.NewReplacer("-", "_", ".", "_").Replace(repo)
}

View File

@ -0,0 +1,58 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package label
import (
"github.com/bazelbuild/bazel-gazelle/internal/config"
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
)
// Labeler generates Bazel labels for rules, based on their locations
// within the repository.
type Labeler struct {
	// c supplies repository-wide settings (e.g. GoPrefix, RepoRoot) used
	// when deriving rule names.
	c *config.Config
}

// NewLabeler returns a Labeler that derives labels using the given config.
func NewLabeler(c *config.Config) *Labeler {
	return &Labeler{c}
}
// LibraryLabel returns the label of the default go_library rule in the
// package at rel (slash-separated path relative to the repository root).
func (l *Labeler) LibraryLabel(rel string) Label {
	return Label{Pkg: rel, Name: config.DefaultLibName}
}
// TestLabel returns the label of the default test rule in the package at
// rel. isXTest selects the external-test rule name instead of the
// internal-test one.
func (l *Labeler) TestLabel(rel string, isXTest bool) Label {
	name := config.DefaultTestName
	if isXTest {
		name = config.DefaultXTestName
	}
	return Label{Pkg: rel, Name: name}
}
// BinaryLabel returns the label of the default go_binary rule in the
// package at rel. The name comes from pathtools.RelBaseName — presumably
// the base name of rel, with the configured prefix/repo root used as a
// fallback for the repository root package; confirm in pathtools.
func (l *Labeler) BinaryLabel(rel string) Label {
	name := pathtools.RelBaseName(rel, l.c.GoPrefix, l.c.RepoRoot)
	return Label{Pkg: rel, Name: name}
}
// ProtoLabel returns the label of a proto_library rule: name + "_proto".
func (l *Labeler) ProtoLabel(rel, name string) Label {
	return Label{Pkg: rel, Name: name + "_proto"}
}

// GoProtoLabel returns the label of a go_proto_library rule:
// name + "_go_proto".
func (l *Labeler) GoProtoLabel(rel, name string) Label {
	return Label{Pkg: rel, Name: name + "_go_proto"}
}

View File

@ -0,0 +1,30 @@
# Bazel build definitions for the vendored
# github.com/bazelbuild/bazel-gazelle/internal/merger package.
# NOTE(review): the "automanaged" tags below suggest parts of this file are
# tool-maintained — confirm before editing by hand.
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = [
        "fix.go",
        "merger.go",
    ],
    importpath = "github.com/bazelbuild/bazel-gazelle/internal/merger",
    visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
    deps = [
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
        "//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
    ],
)

# Source filegroups: "package-srcs" collects this package's files;
# "all-srcs" re-exports them for repository-wide aggregation.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,665 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package merger
import (
"log"
"sort"
"github.com/bazelbuild/bazel-gazelle/internal/config"
bf "github.com/bazelbuild/buildtools/build"
)
// Much of this file could be simplified by using
// github.com/bazelbuild/buildtools/edit. However, through a transitive
// dependency, that library depends on a proto in Bazel itself, which is
// a 95MB download. Not worth it.
// FixFile updates rules in oldFile that were generated by an older version of
// Gazelle to a newer form that can be merged with freshly generated rules.
//
// FixLoads should be called after this, since it will fix load
// statements that may be broken by transformations applied by this function.
func FixFile(c *config.Config, oldFile *bf.File) *bf.File {
	return removeLegacyProto(c, squashCgoLibrary(oldFile))
}
// squashCgoLibrary removes cgo_library rules with the default name and
// merges their attributes with go_library with the default name. If no
// go_library rule exists, a new one will be created.
//
// Note that the library attribute is disregarded, so cgo_library and
// go_library attributes will be squashed even if the cgo_library was unlinked.
// MergeWithExisting will remove unused values and attributes later.
func squashCgoLibrary(oldFile *bf.File) *bf.File {
	// Find the default cgo_library and go_library rules.
	var cgoLibrary, goLibrary bf.Rule
	cgoLibraryIndex := -1
	goLibraryIndex := -1
	for i, stmt := range oldFile.Stmt {
		c, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		r := bf.Rule{Call: c}
		if r.Kind() == "cgo_library" && r.Name() == config.DefaultCgoLibName && !shouldKeep(c) {
			if cgoLibrary.Call != nil {
				// Ambiguous duplicate: warn and keep the first one found.
				log.Printf("%s: when fixing existing file, multiple cgo_library rules with default name found", oldFile.Path)
				continue
			}
			cgoLibrary = r
			cgoLibraryIndex = i
			continue
		}
		if r.Kind() == "go_library" && r.Name() == config.DefaultLibName {
			if goLibrary.Call != nil {
				log.Printf("%s: when fixing existing file, multiple go_library rules with default name referencing cgo_library found", oldFile.Path)
				continue
			}
			goLibrary = r
			goLibraryIndex = i
		}
	}
	// Nothing to squash.
	if cgoLibrary.Call == nil {
		return oldFile
	}
	// If go_library has a '# keep' comment, just delete cgo_library.
	if goLibrary.Call != nil && shouldKeep(goLibrary.Call) {
		fixedFile := *oldFile
		fixedFile.Stmt = append(fixedFile.Stmt[:cgoLibraryIndex], fixedFile.Stmt[cgoLibraryIndex+1:]...)
		return &fixedFile
	}
	// Copy the comments and attributes from cgo_library into go_library. If no
	// go_library exists, create an empty one.
	var fixedGoLibraryExpr bf.CallExpr
	fixedGoLibrary := bf.Rule{Call: &fixedGoLibraryExpr}
	if goLibrary.Call == nil {
		fixedGoLibrary.SetKind("go_library")
		fixedGoLibrary.SetAttr("name", &bf.StringExpr{Value: config.DefaultLibName})
		if vis := cgoLibrary.Attr("visibility"); vis != nil {
			fixedGoLibrary.SetAttr("visibility", vis)
		}
	} else {
		// Shallow-copy the rule and give it a fresh argument list so edits
		// below don't mutate the original statement.
		fixedGoLibraryExpr = *goLibrary.Call
		fixedGoLibraryExpr.List = append([]bf.Expr{}, goLibrary.Call.List...)
	}
	// Drop any "embed" and force cgo on the squashed rule.
	fixedGoLibrary.DelAttr("embed")
	fixedGoLibrary.SetAttr("cgo", &bf.LiteralExpr{Token: "True"})
	fixedGoLibraryExpr.Comments.Before = append(fixedGoLibraryExpr.Comments.Before, cgoLibrary.Call.Comments.Before...)
	fixedGoLibraryExpr.Comments.Suffix = append(fixedGoLibraryExpr.Comments.Suffix, cgoLibrary.Call.Comments.Suffix...)
	fixedGoLibraryExpr.Comments.After = append(fixedGoLibraryExpr.Comments.After, cgoLibrary.Call.Comments.After...)
	// Squash the attributes shared by both rule kinds.
	for _, key := range []string{"cdeps", "clinkopts", "copts", "data", "deps", "gc_goopts", "srcs"} {
		goLibraryAttr := fixedGoLibrary.Attr(key)
		cgoLibraryAttr := cgoLibrary.Attr(key)
		if cgoLibraryAttr == nil {
			continue
		}
		if fixedAttr, err := squashExpr(goLibraryAttr, cgoLibraryAttr); err == nil {
			fixedGoLibrary.SetAttr(key, fixedAttr)
		}
	}
	// Rebuild the file with the cgo_library removed and the go_library replaced.
	// If the go_library didn't already exist, it will replace cgo_library.
	fixedFile := *oldFile
	if goLibrary.Call == nil {
		fixedFile.Stmt = append([]bf.Expr{}, oldFile.Stmt...)
		fixedFile.Stmt[cgoLibraryIndex] = &fixedGoLibraryExpr
	} else {
		fixedFile.Stmt = append(oldFile.Stmt[:cgoLibraryIndex], oldFile.Stmt[cgoLibraryIndex+1:]...)
		// Deleting the cgo_library shifted later statements down by one.
		if goLibraryIndex > cgoLibraryIndex {
			goLibraryIndex--
		}
		fixedFile.Stmt[goLibraryIndex] = &fixedGoLibraryExpr
	}
	return &fixedFile
}
// squashExpr combines two expressions. Unlike mergeExpr, squashExpr does not
// discard information from an "old" expression. It does not sort or de-duplicate
// elements. Any non-scalar expressions that mergeExpr understands can be
// squashed.
func squashExpr(x, y bf.Expr) (bf.Expr, error) {
	xps, err := extractPlatformStringsExprs(x)
	if err != nil {
		return nil, err
	}
	yps, err := extractPlatformStringsExprs(y)
	if err != nil {
		return nil, err
	}
	combined, err := squashPlatformStringsExprs(xps, yps)
	if err != nil {
		return nil, err
	}
	return makePlatformStringsExpr(combined), nil
}
// squashPlatformStringsExprs squashes the four sections (generic, os, arch,
// platform) of two platformStringsExprs pairwise. On error it returns the
// zero value.
func squashPlatformStringsExprs(x, y platformStringsExprs) (platformStringsExprs, error) {
	ps := platformStringsExprs{generic: squashList(x.generic, y.generic)}
	var err error
	if ps.os, err = squashDict(x.os, y.os); err == nil {
		if ps.arch, err = squashDict(x.arch, y.arch); err == nil {
			ps.platform, err = squashDict(x.platform, y.platform)
		}
	}
	if err != nil {
		return platformStringsExprs{}, err
	}
	return ps, nil
}
// squashList concatenates two list expressions, including their comments.
// A nil argument yields the other list unchanged.
func squashList(x, y *bf.ListExpr) *bf.ListExpr {
	switch {
	case x == nil:
		return y
	case y == nil:
		return x
	}
	merged := *x
	merged.Comments.Before = append(x.Comments.Before, y.Comments.Before...)
	merged.Comments.Suffix = append(x.Comments.Suffix, y.Comments.Suffix...)
	merged.Comments.After = append(x.Comments.After, y.Comments.After...)
	merged.List = append(x.List, y.List...)
	return &merged
}
// squashDict combines two dict expressions, concatenating their comments.
// Entries of y whose string key matches an entry of x are squashed into the
// x entry (recursively, via squashExpr); all other entries of y are
// appended. A nil argument yields the other dict unchanged.
func squashDict(x, y *bf.DictExpr) (*bf.DictExpr, error) {
	if x == nil {
		return y, nil
	}
	if y == nil {
		return x, nil
	}
	squashed := *x
	squashed.Comments.Before = append(x.Comments.Before, y.Comments.Before...)
	squashed.Comments.Suffix = append(x.Comments.Suffix, y.Comments.Suffix...)
	squashed.Comments.After = append(x.Comments.After, y.Comments.After...)
	// Index x's entries by string key so matching y entries can be merged.
	xCaseIndex := make(map[string]int)
	for i, e := range x.List {
		kv, ok := e.(*bf.KeyValueExpr)
		if !ok {
			continue
		}
		key, ok := kv.Key.(*bf.StringExpr)
		if !ok {
			continue
		}
		xCaseIndex[key.Value] = i
	}
	for _, e := range y.List {
		kv, ok := e.(*bf.KeyValueExpr)
		if !ok {
			squashed.List = append(squashed.List, e)
			continue
		}
		// Bug fix: the key must come from kv.Key, not from the entry itself.
		// The old code asserted e.(*bf.StringExpr), which always fails for a
		// KeyValueExpr, so matching keys were duplicated instead of squashed.
		key, ok := kv.Key.(*bf.StringExpr)
		if !ok {
			squashed.List = append(squashed.List, e)
			continue
		}
		i, ok := xCaseIndex[key.Value]
		if !ok {
			squashed.List = append(squashed.List, e)
			continue
		}
		squashedElem, err := squashExpr(x.List[i], kv.Value)
		if err != nil {
			return nil, err
		}
		x.List[i] = squashedElem
	}
	return &squashed, nil
}
// removeLegacyProto removes uses of the old proto rules. It deletes loads
// from go_proto_library.bzl. It deletes proto filegroups. It removes
// go_proto_library attributes which are no longer recognized. New rules
// are generated in place of the deleted rules, but attributes and comments
// are not migrated.
func removeLegacyProto(c *config.Config, oldFile *bf.File) *bf.File {
	// Don't fix if the proto mode was set to something other than the default.
	if c.ProtoMode != config.DefaultProtoMode {
		return oldFile
	}
	// Scan for definitions to delete.
	var deletedIndices []int
	var protoIndices []int
	shouldDeleteProtos := false
	for i, stmt := range oldFile.Stmt {
		c, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		x, ok := c.X.(*bf.LiteralExpr)
		if !ok {
			continue
		}
		if x.Token == "load" && len(c.List) > 0 {
			// A load of the legacy macro file marks the go_proto_library
			// rules (collected below) for deletion as well.
			if name, ok := c.List[0].(*bf.StringExpr); ok && name.Value == "@io_bazel_rules_go//proto:go_proto_library.bzl" {
				deletedIndices = append(deletedIndices, i)
				shouldDeleteProtos = true
			}
			continue
		}
		if x.Token == "filegroup" {
			r := bf.Rule{Call: c}
			if r.Name() == config.DefaultProtosName {
				deletedIndices = append(deletedIndices, i)
			}
			continue
		}
		if x.Token == "go_proto_library" {
			protoIndices = append(protoIndices, i)
		}
	}
	if len(deletedIndices) == 0 {
		return oldFile
	}
	// Rebuild the file without deleted statements. Only delete go_proto_library
	// rules if we deleted a load.
	if shouldDeleteProtos {
		deletedIndices = append(deletedIndices, protoIndices...)
		sort.Ints(deletedIndices)
	}
	fixedFile := *oldFile
	fixedFile.Stmt = deleteIndices(oldFile.Stmt, deletedIndices)
	return &fixedFile
}
// FixFileMinor updates rules in oldFile that were generated by an older version
// of Gazelle to a newer form that can be merged with freshly generated rules.
//
// FixFileMinor includes only small, low-risk fixes that can be applied in
// update mode. When both FixFileMinor and FixFile are called, FixFileMinor
// should be called first.
//
// FixLoads should be called after this, since it will fix load
// statements that may be broken by transformations applied by this function.
func FixFileMinor(c *config.Config, oldFile *bf.File) *bf.File {
	f := migrateLibraryEmbed(c, oldFile)
	f = migrateGrpcCompilers(c, f)
	f = removeBinaryImportPath(c, f)
	return f
}
// migrateLibraryEmbed converts "library" attributes to "embed" attributes,
// preserving comments. This only applies to Go rules, and only if there is
// no keep comment on "library" and no existing "embed" attribute.
func migrateLibraryEmbed(c *config.Config, oldFile *bf.File) *bf.File {
	fixed := false
	fixedFile := *oldFile
	for i, stmt := range fixedFile.Stmt {
		call, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		rule := bf.Rule{Call: call}
		if kind := rule.Kind(); !isGoRule(kind) || shouldKeep(stmt) {
			continue
		}
		libExpr := rule.Attr("library")
		if libExpr == nil || shouldKeep(libExpr) || rule.Attr("embed") != nil {
			continue
		}
		fixedCall := *call
		// Copy the argument list before DelAttr/SetAttr. A shallow copy shares
		// the backing array with the original statement, and DelAttr shifts
		// elements in place, which would corrupt oldFile's rule. The sibling
		// functions migrateGrpcCompilers and removeBinaryImportPath already
		// make this copy; do the same here for safety and consistency.
		fixedCall.List = make([]bf.Expr, len(call.List))
		copy(fixedCall.List, call.List)
		rule.Call = &fixedCall
		rule.DelAttr("library")
		rule.SetAttr("embed", &bf.ListExpr{List: []bf.Expr{libExpr}})
		fixedFile.Stmt[i] = &fixedCall
		fixed = true
	}
	if !fixed {
		return oldFile
	}
	return &fixedFile
}
// migrateGrpcCompilers converts "go_grpc_library" rules into "go_proto_library"
// rules with a "compilers" attribute.
func migrateGrpcCompilers(c *config.Config, oldFile *bf.File) *bf.File {
	fixed := false
	fixedFile := *oldFile
	for i, stmt := range fixedFile.Stmt {
		call, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		rule := bf.Rule{Call: call}
		// Skip rules the user marked with '# keep' or that already set
		// "compilers" explicitly.
		if rule.Kind() != "go_grpc_library" || shouldKeep(stmt) || rule.Attr("compilers") != nil {
			continue
		}
		// Edit a copy of the call (with its own argument list) so oldFile's
		// statements are left untouched.
		fixedCall := *call
		fixedCall.List = make([]bf.Expr, len(call.List))
		copy(fixedCall.List, call.List)
		rule.Call = &fixedCall
		rule.SetKind("go_proto_library")
		rule.SetAttr("compilers", &bf.ListExpr{
			List: []bf.Expr{&bf.StringExpr{Value: config.GrpcCompilerLabel}},
		})
		fixedFile.Stmt[i] = &fixedCall
		fixed = true
	}
	if !fixed {
		return oldFile
	}
	return &fixedFile
}
// removeBinaryImportPath removes "importpath" attributes from "go_binary"
// and "go_test" rules. These are now deprecated.
func removeBinaryImportPath(c *config.Config, oldFile *bf.File) *bf.File {
	changed := false
	fixedFile := *oldFile
	for i, stmt := range fixedFile.Stmt {
		call, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		rule := bf.Rule{Call: call}
		isBinaryOrTest := rule.Kind() == "go_binary" || rule.Kind() == "go_test"
		if !isBinaryOrTest || rule.Attr("importpath") == nil {
			continue
		}
		// Edit a copy of the call (with its own argument list) so oldFile's
		// statements are left untouched.
		newCall := *call
		newCall.List = make([]bf.Expr, len(call.List))
		copy(newCall.List, call.List)
		rule.Call = &newCall
		rule.DelAttr("importpath")
		fixedFile.Stmt[i] = &newCall
		changed = true
	}
	if !changed {
		return oldFile
	}
	return &fixedFile
}
// FixLoads removes loads of unused go rules and adds loads of newly used rules.
// This should be called after FixFile and MergeWithExisting, since symbols
// may be introduced that aren't loaded.
func FixLoads(oldFile *bf.File) *bf.File {
	// Make a list of load statements in the file. Keep track of loads of known
	// files, since these may be changed. Keep track of known symbols loaded from
	// unknown files; we will not add loads for these.
	type loadInfo struct {
		index      int
		file       string
		old, fixed *bf.CallExpr
	}
	var loads []loadInfo
	otherLoadedKinds := make(map[string]bool)
	for i, stmt := range oldFile.Stmt {
		c, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		x, ok := c.X.(*bf.LiteralExpr)
		if !ok || x.Token != "load" {
			continue
		}
		if len(c.List) == 0 {
			continue
		}
		label, ok := c.List[0].(*bf.StringExpr)
		if !ok {
			continue
		}
		if knownFiles[label.Value] {
			loads = append(loads, loadInfo{index: i, file: label.Value, old: c})
			continue
		}
		for _, arg := range c.List[1:] {
			switch sym := arg.(type) {
			case *bf.StringExpr:
				otherLoadedKinds[sym.Value] = true
			case *bf.BinaryExpr:
				// "alias = symbol" form: the local name is on the left.
				if sym.Op != "=" {
					continue
				}
				if x, ok := sym.X.(*bf.LiteralExpr); ok {
					otherLoadedKinds[x.Token] = true
				}
			}
		}
	}
	// Make a map of all the symbols from known files used in this file.
	usedKinds := make(map[string]map[string]bool)
	for _, stmt := range oldFile.Stmt {
		c, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		x, ok := c.X.(*bf.LiteralExpr)
		if !ok {
			continue
		}
		kind := x.Token
		if file, ok := knownKinds[kind]; ok && !otherLoadedKinds[kind] {
			if usedKinds[file] == nil {
				usedKinds[file] = make(map[string]bool)
			}
			usedKinds[file][kind] = true
		}
	}
	// Fix the load statements. The order is important, so we iterate over
	// knownLoads instead of knownFiles.
	changed := false
	var newFirstLoads []*bf.CallExpr
	for _, l := range knownLoads {
		file := l.file
		first := true
		// Idiom fix: "for i, _ := range" had a redundant blank identifier.
		for i := range loads {
			li := &loads[i]
			if li.file != file {
				continue
			}
			if first {
				// The first load for a file keeps all wanted symbols;
				// subsequent loads for the same file are emptied (and deleted).
				li.fixed = fixLoad(li.old, file, usedKinds[file])
				first = false
			} else {
				li.fixed = fixLoad(li.old, file, nil)
			}
			changed = changed || li.fixed != li.old
		}
		if first {
			// No existing load for this file; synthesize one if needed.
			load := fixLoad(nil, file, usedKinds[file])
			if load != nil {
				newFirstLoads = append(newFirstLoads, load)
				changed = true
			}
		}
	}
	if !changed {
		return oldFile
	}
	// Rebuild the file, inserting new loads at the top and substituting or
	// deleting the fixed loads in place.
	fixedFile := *oldFile
	fixedFile.Stmt = make([]bf.Expr, 0, len(oldFile.Stmt)+len(newFirstLoads))
	for _, l := range newFirstLoads {
		fixedFile.Stmt = append(fixedFile.Stmt, l)
	}
	loadIndex := 0
	for i, stmt := range oldFile.Stmt {
		if loadIndex < len(loads) && i == loads[loadIndex].index {
			if loads[loadIndex].fixed != nil {
				fixedFile.Stmt = append(fixedFile.Stmt, loads[loadIndex].fixed)
			}
			loadIndex++
			continue
		}
		fixedFile.Stmt = append(fixedFile.Stmt, stmt)
	}
	return &fixedFile
}
// knownLoads is a list of files Gazelle will generate loads from and
// the symbols it knows about. All symbols Gazelle ever generated
// loads for are present, including symbols it no longer uses (e.g.,
// cgo_library). Manually loaded symbols (e.g., go_embed_data) are not
// included. The order of the files here will match the order of
// generated load statements. The symbols should be sorted
// lexicographically.
var knownLoads = []struct {
	file  string
	kinds []string
}{
	{
		// Core Go rules.
		"@io_bazel_rules_go//go:def.bzl",
		[]string{
			"cgo_library",
			"go_binary",
			"go_library",
			"go_prefix",
			"go_repository",
			"go_test",
		},
	}, {
		// Proto rules.
		"@io_bazel_rules_go//proto:def.bzl",
		[]string{
			"go_grpc_library",
			"go_proto_library",
		},
	},
}
// knownFiles is the set of labels for files that Gazelle loads symbols from.
// Derived from knownLoads by the init function below.
var knownFiles map[string]bool

// knownKinds is a map from symbols to labels of the files they are loaded
// from. Derived from knownLoads by the init function below.
var knownKinds map[string]string
// init populates knownFiles and knownKinds from the knownLoads table.
func init() {
	knownFiles = make(map[string]bool)
	knownKinds = make(map[string]string)
	for _, load := range knownLoads {
		knownFiles[load.file] = true
		for _, kind := range load.kinds {
			knownKinds[kind] = load.file
		}
	}
}
// fixLoad updates a load statement. load must be a load statement for
// the Go rules or nil. If nil, a new statement may be created. Symbols in
// kinds are added if they are not already present, symbols in knownKinds
// are removed if they are not in kinds, and other symbols and arguments
// are preserved. nil is returned if the statement should be deleted because
// it is empty.
func fixLoad(load *bf.CallExpr, file string, kinds map[string]bool) *bf.CallExpr {
	var fixed bf.CallExpr
	if load == nil {
		fixed = bf.CallExpr{
			X: &bf.LiteralExpr{Token: "load"},
			List: []bf.Expr{
				&bf.StringExpr{Value: file},
			},
			ForceCompact: true,
		}
	} else {
		fixed = *load
	}
	var symbols []*bf.StringExpr
	var otherArgs []bf.Expr
	loadedKinds := make(map[string]bool)
	var added, removed int
	for _, arg := range fixed.List[1:] {
		if s, ok := arg.(*bf.StringExpr); ok {
			// Keep symbols Gazelle doesn't manage unconditionally; keep
			// managed symbols only while they are still wanted.
			if knownKinds[s.Value] == "" || kinds != nil && kinds[s.Value] {
				symbols = append(symbols, s)
				loadedKinds[s.Value] = true
			} else {
				removed++
			}
		} else {
			otherArgs = append(otherArgs, arg)
		}
	}
	if kinds != nil {
		// Idiom fix: "for kind, _ := range" had a redundant blank identifier.
		for kind := range kinds {
			if _, ok := loadedKinds[kind]; !ok {
				symbols = append(symbols, &bf.StringExpr{Value: kind})
				added++
			}
		}
	}
	if added == 0 && removed == 0 {
		if load != nil && len(load.List) == 1 {
			// Special case: delete existing empty load.
			return nil
		}
		return load
	}
	sort.Stable(byString(symbols))
	fixed.List = fixed.List[:1]
	for _, sym := range symbols {
		fixed.List = append(fixed.List, sym)
	}
	fixed.List = append(fixed.List, otherArgs...)
	if len(fixed.List) == 1 {
		return nil
	}
	return &fixed
}
// byString implements sort.Interface over string expressions, ordering
// them by their string values.
type byString []*bf.StringExpr

func (s byString) Len() int           { return len(s) }
func (s byString) Less(i, j int) bool { return s[i].Value < s[j].Value }
func (s byString) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
// isGoRule reports whether kind is one of the Go rule kinds that Gazelle
// manages (see migrateLibraryEmbed for a caller).
func isGoRule(kind string) bool {
	switch kind {
	case "go_library", "go_binary", "go_test", "go_proto_library", "go_grpc_library":
		return true
	}
	return false
}

View File

@ -0,0 +1,926 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package merger provides methods for merging parsed BUILD files.
package merger
import (
"fmt"
"log"
"sort"
"strings"
"github.com/bazelbuild/bazel-gazelle/internal/config"
"github.com/bazelbuild/bazel-gazelle/internal/label"
bf "github.com/bazelbuild/buildtools/build"
)
// keep is the comment marker in srcs or deps that tells gazelle to preserve
// an entry it would otherwise change or remove.
const keep = "keep" // marker in srcs or deps to tell gazelle to preserve.

// MergableAttrs is the set of attribute names for each kind of rule that
// may be merged. When an attribute is mergeable, a generated value may
// replace or augment an existing value. If an attribute is not mergeable,
// existing values are preserved. Generated non-mergeable attributes may
// still be added to a rule if there is no corresponding existing attribute.
//
// The outer key is the rule kind (e.g. "go_library"); the inner key is the
// attribute name (e.g. "srcs").
type MergeableAttrs map[string]map[string]bool
// The following sets are populated from a declarative table in the init
// function below.
var (
	// PreResolveAttrs is the set of attributes that should be merged before
	// dependency resolution, i.e., everything except deps.
	PreResolveAttrs MergeableAttrs
	// PostResolveAttrs is the set of attributes that should be merged after
	// dependency resolution, i.e., deps.
	PostResolveAttrs MergeableAttrs
	// RepoAttrs is the set of attributes that should be merged in repository
	// rules in WORKSPACE.
	RepoAttrs MergeableAttrs
	// nonEmptyAttrs is the set of attributes that disqualify a rule from being
	// deleted after merge.
	nonEmptyAttrs MergeableAttrs
)
// init expands the declarative (attribute set × rule kinds × attributes)
// table below into the four MergeableAttrs maps declared above.
func init() {
	PreResolveAttrs = make(MergeableAttrs)
	PostResolveAttrs = make(MergeableAttrs)
	RepoAttrs = make(MergeableAttrs)
	nonEmptyAttrs = make(MergeableAttrs)
	for _, set := range []struct {
		mergeableAttrs MergeableAttrs
		kinds, attrs   []string
	}{
		// Sources merge before resolution for all Go and proto rules.
		{
			mergeableAttrs: PreResolveAttrs,
			kinds: []string{
				"go_library",
				"go_binary",
				"go_test",
				"go_proto_library",
				"proto_library",
			},
			attrs: []string{
				"srcs",
			},
		}, {
			mergeableAttrs: PreResolveAttrs,
			kinds: []string{
				"go_library",
				"go_proto_library",
			},
			attrs: []string{
				"importpath",
			},
		}, {
			mergeableAttrs: PreResolveAttrs,
			kinds: []string{
				"go_library",
				"go_binary",
				"go_test",
				"go_proto_library",
			},
			attrs: []string{
				"cgo",
				"clinkopts",
				"copts",
				"embed",
			},
		}, {
			mergeableAttrs: PreResolveAttrs,
			kinds: []string{
				"go_proto_library",
			},
			attrs: []string{
				"proto",
			},
		},
		// Dependency attributes merge only after resolution.
		{
			mergeableAttrs: PostResolveAttrs,
			kinds: []string{
				"go_library",
				"go_binary",
				"go_test",
				"go_proto_library",
				"proto_library",
			},
			attrs: []string{
				"deps",
				config.GazelleImportsKey,
			},
		},
		// go_repository rules in WORKSPACE merge their pin/source attributes.
		{
			mergeableAttrs: RepoAttrs,
			kinds: []string{
				"go_repository",
			},
			attrs: []string{
				"commit",
				"importpath",
				"remote",
				"sha256",
				"strip_prefix",
				"tag",
				"type",
				"urls",
				"vcs",
			},
		},
		// Rules keeping any of these attributes are not deleted after merge.
		{
			mergeableAttrs: nonEmptyAttrs,
			kinds: []string{
				"go_binary",
				"go_library",
				"go_test",
				"proto_library",
			},
			attrs: []string{
				"srcs",
				"deps",
			},
		}, {
			mergeableAttrs: nonEmptyAttrs,
			kinds: []string{
				"go_binary",
				"go_library",
				"go_test",
			},
			attrs: []string{
				"embed",
			},
		}, {
			mergeableAttrs: nonEmptyAttrs,
			kinds: []string{
				"go_proto_library",
			},
			attrs: []string{
				"proto",
			},
		},
	} {
		for _, kind := range set.kinds {
			if set.mergeableAttrs[kind] == nil {
				set.mergeableAttrs[kind] = make(map[string]bool)
			}
			for _, attr := range set.attrs {
				set.mergeableAttrs[kind][attr] = true
			}
		}
	}
}
// MergeFile merges the rules in genRules with matching rules in oldFile and
// adds unmatched rules to the end of the merged file. MergeFile also merges
// rules in empty with matching rules in oldFile and deletes rules that
// are empty after merging. attrs is the set of attributes to merge. Attributes
// not in this set will be left alone if they already exist.
func MergeFile(genRules []bf.Expr, empty []bf.Expr, oldFile *bf.File, attrs MergeableAttrs) (mergedFile *bf.File, mergedRules []bf.Expr) {
	// Merge empty rules into the file and delete any rules which become empty.
	mergedFile = new(bf.File)
	*mergedFile = *oldFile
	mergedFile.Stmt = append([]bf.Expr{}, oldFile.Stmt...)
	var deletedIndices []int
	for _, s := range empty {
		emptyCall := s.(*bf.CallExpr)
		if oldCall, i, _ := match(oldFile.Stmt, emptyCall); oldCall != nil {
			mergedRule := mergeRule(emptyCall, oldCall, attrs, oldFile.Path)
			if isRuleEmpty(mergedRule) {
				deletedIndices = append(deletedIndices, i)
			} else {
				mergedFile.Stmt[i] = mergedRule
			}
		}
	}
	if len(deletedIndices) > 0 {
		sort.Ints(deletedIndices)
		mergedFile.Stmt = deleteIndices(mergedFile.Stmt, deletedIndices)
	}
	// Match generated rules with existing rules in the file. Keep track of
	// rules with non-standard names.
	matchIndices := make([]int, len(genRules))
	matchErrors := make([]error, len(genRules))
	substitutions := make(map[string]string)
	for i, s := range genRules {
		genCall := s.(*bf.CallExpr)
		oldCall, oldIndex, err := match(mergedFile.Stmt, genCall)
		if err != nil {
			// TODO(jayconrod): add a verbose mode and log errors. They are too chatty
			// to print by default.
			matchErrors[i] = err
			continue
		}
		matchIndices[i] = oldIndex // < 0 indicates no match
		if oldCall != nil {
			oldRule := bf.Rule{Call: oldCall}
			genRule := bf.Rule{Call: genCall}
			oldName := oldRule.Name()
			genName := genRule.Name()
			if oldName != genName {
				substitutions[genName] = oldName
			}
		}
	}
	// Rename labels in generated rules that refer to other generated rules.
	if len(substitutions) > 0 {
		// Copy the slice first so the caller's genRules is not modified.
		genRules = append([]bf.Expr{}, genRules...)
		for i, s := range genRules {
			genRules[i] = substituteRule(s.(*bf.CallExpr), substitutions)
		}
	}
	// Merge generated rules with existing rules or append to the end of the file.
	for i := range genRules {
		if matchErrors[i] != nil {
			// Ambiguous matches are skipped entirely: neither merged nor appended.
			continue
		}
		if matchIndices[i] < 0 {
			mergedFile.Stmt = append(mergedFile.Stmt, genRules[i])
			mergedRules = append(mergedRules, genRules[i])
		} else {
			mergedRule := mergeRule(genRules[i].(*bf.CallExpr), mergedFile.Stmt[matchIndices[i]].(*bf.CallExpr), attrs, oldFile.Path)
			mergedFile.Stmt[matchIndices[i]] = mergedRule
			mergedRules = append(mergedRules, mergedRule)
		}
	}
	return mergedFile, mergedRules
}
// mergeRule combines information from gen and old and returns an updated rule.
// Both rules must be non-nil and must have the same kind and same name.
// attrs is the set of attributes which may be merged.
// If nil is returned, the rule should be deleted.
// NOTE(review): as written, this function always returns a non-nil rule
// (&merged or old); the "nil is returned" sentence looks stale — confirm.
func mergeRule(gen, old *bf.CallExpr, attrs MergeableAttrs, filename string) bf.Expr {
	// A "# keep" comment on the whole rule freezes it: return it unchanged.
	if old != nil && shouldKeep(old) {
		return old
	}
	genRule := bf.Rule{Call: gen}
	oldRule := bf.Rule{Call: old}
	// merged starts as a copy of old with an empty argument list; arguments
	// are re-added below in a deterministic order.
	merged := *old
	merged.List = nil
	mergedRule := bf.Rule{Call: &merged}
	// Copy unnamed arguments from the old rule without merging. The only rule
	// generated with unnamed arguments is go_prefix, which we currently
	// leave in place.
	// TODO: maybe gazelle should allow the prefix to be changed.
	for _, a := range old.List {
		if b, ok := a.(*bf.BinaryExpr); ok && b.Op == "=" {
			break
		}
		merged.List = append(merged.List, a)
	}
	// Merge attributes from the old rule. Preserve comments on old attributes.
	// Assume generated attributes have no comments.
	kind := oldRule.Kind()
	for _, k := range oldRule.AttrKeys() {
		oldAttr := oldRule.AttrDefn(k)
		// Non-mergeable or "# keep"-marked attributes pass through verbatim.
		if !attrs[kind][k] || shouldKeep(oldAttr) {
			merged.List = append(merged.List, oldAttr)
			continue
		}
		oldExpr := oldAttr.Y
		genExpr := genRule.Attr(k)
		mergedExpr, err := mergeExpr(genExpr, oldExpr)
		if err != nil {
			// On merge failure, keep the old expression and log the location.
			start, end := oldExpr.Span()
			log.Printf("%s:%d.%d-%d.%d: could not merge expression", filename, start.Line, start.LineRune, end.Line, end.LineRune)
			mergedExpr = oldExpr
		}
		// A nil merged expression means the attribute is dropped entirely.
		if mergedExpr != nil {
			mergedAttr := *oldAttr
			mergedAttr.Y = mergedExpr
			merged.List = append(merged.List, &mergedAttr)
		}
	}
	// Merge attributes from genRule that we haven't processed already.
	for _, k := range genRule.AttrKeys() {
		if mergedRule.Attr(k) == nil {
			mergedRule.SetAttr(k, genRule.Attr(k))
		}
	}
	return &merged
}
// mergeExpr combines information from gen and old and returns an updated
// expression. The following kinds of expressions are recognized:
//
//   * nil
//   * strings (can only be merged with strings)
//   * lists of strings
//   * a call to select with a dict argument. The dict keys must be strings,
//     and the values must be lists of strings.
//   * a list of strings combined with a select call using +. The list must
//     be the left operand.
//
// An error is returned if the expressions can't be merged, for example
// because they are not in one of the above formats.
func mergeExpr(gen, old bf.Expr) (bf.Expr, error) {
	// Expressions marked "# keep" are preserved verbatim.
	if shouldKeep(old) {
		return old, nil
	}
	// Scalars (and nil) short-circuit: the generated value wins outright.
	if gen == nil && (old == nil || isScalar(old)) {
		return nil, nil
	}
	if isScalar(gen) {
		return gen, nil
	}
	// Both sides must decompose into the platform-strings shape before they
	// can be merged piecewise.
	genPS, genErr := extractPlatformStringsExprs(gen)
	if genErr != nil {
		return nil, genErr
	}
	oldPS, oldErr := extractPlatformStringsExprs(old)
	if oldErr != nil {
		return nil, oldErr
	}
	mergedPS, mergeErr := mergePlatformStringsExprs(genPS, oldPS)
	if mergeErr != nil {
		return nil, mergeErr
	}
	return makePlatformStringsExpr(mergedPS), nil
}
// platformStringsExprs is a set of sub-expressions that match the structure
// of package.PlatformStrings. rules.Generator produces expressions that
// follow this structure for srcs, deps, and other attributes, so this matches
// all non-scalar expressions generated by Gazelle.
//
// The matched expression has the form:
//
//   [] + select({}) + select({}) + select({})
//
// The four collections may appear in any order, and some or all of them may
// be omitted (all fields are nil for a nil expression).
type platformStringsExprs struct {
	// generic is the plain list; os, arch, and platform are the dict
	// arguments of the select calls keyed by OS, architecture, and
	// os_arch platform labels respectively.
	generic            *bf.ListExpr
	os, arch, platform *bf.DictExpr
}
// extractPlatformStringsExprs matches an expression and attempts to extract
// sub-expressions in platformStringsExprs. The sub-expressions can then be
// merged with corresponding sub-expressions. Any field in the returned
// structure may be nil. An error is returned if the given expression does
// not follow the pattern described by platformStringsExprs.
func extractPlatformStringsExprs(expr bf.Expr) (platformStringsExprs, error) {
	var ps platformStringsExprs
	if expr == nil {
		return ps, nil
	}
	// Break the expression into a sequence of expressions combined with +.
	// Operands are collected right-to-left (binop.Y first), so parts ends up
	// in reverse source order; order does not matter for classification below.
	var parts []bf.Expr
	for {
		binop, ok := expr.(*bf.BinaryExpr)
		if !ok {
			parts = append(parts, expr)
			break
		}
		parts = append(parts, binop.Y)
		expr = binop.X
	}
	// Process each part. They may be in any order.
	for _, part := range parts {
		switch part := part.(type) {
		case *bf.ListExpr:
			if ps.generic != nil {
				return platformStringsExprs{}, fmt.Errorf("expression could not be matched: multiple list expressions")
			}
			ps.generic = part
		case *bf.CallExpr:
			x, ok := part.X.(*bf.LiteralExpr)
			if !ok || x.Token != "select" || len(part.List) != 1 {
				return platformStringsExprs{}, fmt.Errorf("expression could not be matched: callee other than select or wrong number of args")
			}
			arg, ok := part.List[0].(*bf.DictExpr)
			if !ok {
				return platformStringsExprs{}, fmt.Errorf("expression could not be matched: select argument not dict")
			}
			// Classify the select by inspecting its keys: the first key that is
			// a known OS, known arch, or os_arch pair decides which field the
			// dict belongs to. dict points at that field.
			var dict **bf.DictExpr
			for _, item := range arg.List {
				kv := item.(*bf.KeyValueExpr) // parser guarantees this
				k, ok := kv.Key.(*bf.StringExpr)
				if !ok {
					return platformStringsExprs{}, fmt.Errorf("expression could not be matched: dict keys are not all strings")
				}
				if k.Value == "//conditions:default" {
					continue
				}
				key, err := label.Parse(k.Value)
				if err != nil {
					return platformStringsExprs{}, fmt.Errorf("expression could not be matched: dict key is not label: %q", k.Value)
				}
				if config.KnownOSSet[key.Name] {
					dict = &ps.os
					break
				}
				if config.KnownArchSet[key.Name] {
					dict = &ps.arch
					break
				}
				osArch := strings.Split(key.Name, "_")
				if len(osArch) != 2 || !config.KnownOSSet[osArch[0]] || !config.KnownArchSet[osArch[1]] {
					return platformStringsExprs{}, fmt.Errorf("expression could not be matched: dict key contains unknown platform: %q", k.Value)
				}
				dict = &ps.platform
				break
			}
			if dict == nil {
				// We could not identify the dict because it's empty or only contains
				// //conditions:default. We'll call it the platform dict to avoid
				// dropping it.
				dict = &ps.platform
			}
			// Two selects of the same flavor cannot be represented.
			if *dict != nil {
				return platformStringsExprs{}, fmt.Errorf("expression could not be matched: multiple selects that are either os-specific, arch-specific, or platform-specific")
			}
			*dict = arg
		}
	}
	return ps, nil
}
// makePlatformStringsExpr constructs a single expression from the
// sub-expressions in ps. The result has the shape
// list + select(os) + select(arch) + select(platform), omitting nil parts.
// It returns nil when every part is nil.
func makePlatformStringsExpr(ps platformStringsExprs) bf.Expr {
	wrapSelect := func(dict *bf.DictExpr) bf.Expr {
		return &bf.CallExpr{
			X:    &bf.LiteralExpr{Token: "select"},
			List: []bf.Expr{dict},
		}
	}
	setMultiline := func(e bf.Expr) {
		switch e := e.(type) {
		case *bf.ListExpr:
			e.ForceMultiLine = true
		case *bf.CallExpr:
			e.List[0].(*bf.DictExpr).ForceMultiLine = true
		}
	}
	var pieces []bf.Expr
	if ps.generic != nil {
		pieces = append(pieces, ps.generic)
	}
	for _, dict := range []*bf.DictExpr{ps.os, ps.arch, ps.platform} {
		if dict != nil {
			pieces = append(pieces, wrapSelect(dict))
		}
	}
	switch len(pieces) {
	case 0:
		return nil
	case 1:
		// A lone part is returned as-is, without forcing multiline layout.
		return pieces[0]
	}
	// Fold the pieces into a left-associated chain of + expressions,
	// forcing each operand onto multiple lines for readability.
	setMultiline(pieces[0])
	result := pieces[0]
	for _, piece := range pieces[1:] {
		setMultiline(piece)
		result = &bf.BinaryExpr{
			Op: "+",
			X:  result,
			Y:  piece,
		}
	}
	return result
}
// mergePlatformStringsExprs merges gen and old field by field: the generic
// lists with mergeList and each dict (os, arch, platform) with mergeDict.
// The zero value and the first dict-merge error are returned on failure.
func mergePlatformStringsExprs(gen, old platformStringsExprs) (platformStringsExprs, error) {
	merged := platformStringsExprs{generic: mergeList(gen.generic, old.generic)}
	var err error
	if merged.os, err = mergeDict(gen.os, old.os); err != nil {
		return platformStringsExprs{}, err
	}
	if merged.arch, err = mergeDict(gen.arch, old.arch); err != nil {
		return platformStringsExprs{}, err
	}
	if merged.platform, err = mergeDict(gen.platform, old.platform); err != nil {
		return platformStringsExprs{}, err
	}
	return merged, nil
}
// mergeList merges two lists of strings. The result keeps every old element
// that is either marked "# keep" or also present in the gen list (preserving
// old comments), then appends any remaining gen elements. It returns nil when
// the merged list is empty, and gen unchanged when old is nil.
func mergeList(gen, old *bf.ListExpr) *bf.ListExpr {
	if old == nil {
		return gen
	}
	if gen == nil {
		gen = &bf.ListExpr{List: []bf.Expr{}}
	}
	// Build a list of strings from the gen list and keep matching strings
	// in the old list. This preserves comments. Also keep anything with
	// a "# keep" comment, whether or not it's in the gen list.
	genSet := make(map[string]bool)
	for _, v := range gen.List {
		if s := stringValue(v); s != "" {
			genSet[s] = true
		}
	}
	var merged []bf.Expr
	kept := make(map[string]bool)
	keepComment := false
	for _, v := range old.List {
		s := stringValue(v)
		if keep := shouldKeep(v); keep || genSet[s] {
			keepComment = keepComment || keep
			merged = append(merged, v)
			if s != "" {
				kept[s] = true
			}
		}
	}
	// Add anything in the gen list that wasn't kept. Non-string gen elements
	// have s == "" and are always appended here.
	for _, v := range gen.List {
		if s := stringValue(v); kept[s] {
			continue
		}
		merged = append(merged, v)
	}
	if len(merged) == 0 {
		return nil
	}
	return &bf.ListExpr{
		List:           merged,
		ForceMultiLine: gen.ForceMultiLine || old.ForceMultiLine || keepComment,
	}
}
// mergeDict merges two select dicts case by case with mergeList. Keys present
// in either dict are merged; the "//conditions:default" case is always kept
// (as an empty list if necessary) and sorted last. It returns nil, nil when
// the merged dict would be empty, and an error for malformed entries.
func mergeDict(gen, old *bf.DictExpr) (*bf.DictExpr, error) {
	if old == nil {
		return gen, nil
	}
	if gen == nil {
		gen = &bf.DictExpr{List: []bf.Expr{}}
	}
	// Index old entries by key, preserving insertion order in entries.
	var entries []*dictEntry
	entryMap := make(map[string]*dictEntry)
	for _, kv := range old.List {
		k, v, err := dictEntryKeyValue(kv)
		if err != nil {
			return nil, err
		}
		if _, ok := entryMap[k]; ok {
			return nil, fmt.Errorf("old dict contains more than one case named %q", k)
		}
		e := &dictEntry{key: k, oldValue: v}
		entries = append(entries, e)
		entryMap[k] = e
	}
	// Attach gen values to existing entries or create new ones. Note that
	// duplicate keys in gen are not rejected; the last value wins.
	for _, kv := range gen.List {
		k, v, err := dictEntryKeyValue(kv)
		if err != nil {
			return nil, err
		}
		e, ok := entryMap[k]
		if !ok {
			e = &dictEntry{key: k}
			entries = append(entries, e)
			entryMap[k] = e
		}
		e.genValue = v
	}
	keys := make([]string, 0, len(entries))
	haveDefault := false
	for _, e := range entries {
		e.mergedValue = mergeList(e.genValue, e.oldValue)
		if e.key == "//conditions:default" {
			// Keep the default case, even if it's empty.
			haveDefault = true
			if e.mergedValue == nil {
				e.mergedValue = &bf.ListExpr{}
			}
		} else if e.mergedValue != nil {
			keys = append(keys, e.key)
		}
	}
	// Drop the dict entirely if nothing survived but an empty default.
	if len(keys) == 0 && (!haveDefault || len(entryMap["//conditions:default"].mergedValue.List) == 0) {
		return nil, nil
	}
	sort.Strings(keys)
	// Always put the default case last.
	if haveDefault {
		keys = append(keys, "//conditions:default")
	}
	mergedEntries := make([]bf.Expr, len(keys))
	for i, k := range keys {
		e := entryMap[k]
		mergedEntries[i] = &bf.KeyValueExpr{
			Key:   &bf.StringExpr{Value: e.key},
			Value: e.mergedValue,
		}
	}
	return &bf.DictExpr{List: mergedEntries, ForceMultiLine: true}, nil
}
// dictEntry is scratch state for mergeDict: one select case identified by
// key, with its value from the old dict, the generated dict, and the merge
// of the two.
type dictEntry struct {
	key                             string
	oldValue, genValue, mergedValue *bf.ListExpr
}
// dictEntryKeyValue destructures a dict entry into its string key and list
// value, returning an error if the entry is not a key-value pair, the key is
// not a string, or the value is not a list.
func dictEntryKeyValue(e bf.Expr) (string, *bf.ListExpr, error) {
	pair, ok := e.(*bf.KeyValueExpr)
	if !ok {
		return "", nil, fmt.Errorf("dict entry was not a key-value pair: %#v", e)
	}
	key, ok := pair.Key.(*bf.StringExpr)
	if !ok {
		return "", nil, fmt.Errorf("dict key was not string: %#v", pair.Key)
	}
	value, ok := pair.Value.(*bf.ListExpr)
	if !ok {
		return "", nil, fmt.Errorf("dict value was not list: %#v", pair.Value)
	}
	return key.Value, value, nil
}
// substituteAttrs contains a list of attributes for each kind that should be
// processed by substituteRule and substituteExpr. Note that "name" does not
// need to be substituted since it's not mergeable.
// These attributes hold local labels that may point at other generated rules.
var substituteAttrs = map[string][]string{
	"go_binary":        {"embed"},
	"go_library":       {"embed"},
	"go_test":          {"embed"},
	"go_proto_library": {"proto"},
}
// substituteRule replaces local labels (those beginning with ":", referring to
// targets in the same package) according to a substitution map. This is used
// to update generated rules before merging when the corresponding existing
// rules have different names. If substituteRule replaces a string, it returns
// a new expression; it will not modify the original expression.
func substituteRule(call *bf.CallExpr, substitutions map[string]string) *bf.CallExpr {
	rule := bf.Rule{Call: call}
	attrs, ok := substituteAttrs[rule.Kind()]
	if !ok {
		// Kinds without substitutable attributes pass through untouched.
		return call
	}
	didCopy := false
	for i, arg := range call.List {
		kv, ok := arg.(*bf.BinaryExpr)
		if !ok || kv.Op != "=" {
			continue
		}
		key, ok := kv.X.(*bf.LiteralExpr)
		if !ok {
			// Attribute names are literal identifiers. Checking ok here fixes
			// the original code, which assigned ok without testing it and
			// would dereference a nil key for non-literal left-hand sides.
			continue
		}
		shouldRename := false
		for _, k := range attrs {
			shouldRename = shouldRename || key.Token == k
		}
		if !shouldRename {
			continue
		}
		value := substituteExpr(kv.Y, substitutions)
		if value != kv.Y {
			// Copy-on-write: clone the call (and its List) at most once so the
			// caller's expression is never mutated.
			if !didCopy {
				didCopy = true
				callCopy := *call
				callCopy.List = append([]bf.Expr{}, call.List...)
				call = &callCopy
			}
			kvCopy := *kv
			kvCopy.Y = value
			call.List[i] = &kvCopy
		}
	}
	return call
}
// substituteExpr replaces local labels according to a substitution map.
// It only supports string and list expressions (which should be sufficient
// for generated rules). If it replaces a string, it returns a new expression;
// otherwise, it returns e.
func substituteExpr(e bf.Expr, substitutions map[string]string) bf.Expr {
	switch expr := e.(type) {
	case *bf.StringExpr:
		if newName, ok := substitutions[strings.TrimPrefix(expr.Value, ":")]; ok {
			return &bf.StringExpr{Value: ":" + newName}
		}
	case *bf.ListExpr:
		// Copy-on-write: only clone the list once a replacement happens.
		var clone *bf.ListExpr
		for i, item := range expr.List {
			replaced := substituteExpr(item, substitutions)
			if replaced == item {
				continue
			}
			if clone == nil {
				clone = new(bf.ListExpr)
				*clone = *expr
				clone.List = append([]bf.Expr{}, expr.List...)
			}
			clone.List[i] = replaced
		}
		if clone != nil {
			return clone
		}
	}
	return e
}
// shouldKeep returns whether an expression from the original file should be
// preserved. This is true if it has a prefix or end-of-line comment "keep".
// Note that bf.Rewrite recognizes "keep sorted" comments which are different,
// so we don't recognize comments that only start with "keep".
func shouldKeep(e bf.Expr) bool {
	comments := e.Comment()
	for _, c := range append(comments.Before, comments.Suffix...) {
		if strings.TrimSpace(strings.TrimPrefix(c.Token, "#")) == keep {
			return true
		}
	}
	return false
}
// matchAttrs contains lists of attributes for each kind that are used in
// matching. For example, importpath attributes can be used to match go_library
// rules, even when the names are different.
// Attributes are tried in slice order by match; earlier entries win.
var matchAttrs = map[string][]string{
	"go_library":       {"importpath"},
	"go_proto_library": {"importpath"},
	"go_repository":    {"importpath"},
}
// matchAny is a set of kinds which may be matched regardless of attributes.
// For example, if there is only one go_binary in a package, any go_binary
// rule will match.
var matchAny = map[string]bool{"go_binary": true}
// match searches for a rule that can be merged with x in stmts.
//
// A rule is considered a match if its kind is equal to x's kind AND either its
// name is equal OR at least one of the attributes in matchAttrs is equal.
//
// If there are no matches, nil, -1, and nil are returned.
//
// If a rule has the same name but a different kind, nil, -1, and an error
// are returned.
//
// If there is exactly one match, the rule, its index in stmts, and nil
// are returned.
//
// If there are multiple matches, match will attempt to disambiguate, based on
// the quality of the match (name match is best, then attribute match in the
// order that attributes are listed). If disambiguation is successful,
// the rule, its index in stmts, and nil are returned. Otherwise, nil, -1,
// and an error are returned.
func match(stmts []bf.Expr, x *bf.CallExpr) (*bf.CallExpr, int, error) {
	type matchInfo struct {
		rule  bf.Rule
		index int
	}
	xr := bf.Rule{Call: x}
	xname := xr.Name()
	xkind := xr.Kind()
	var nameMatches []matchInfo
	var kindMatches []matchInfo
	for i, s := range stmts {
		y, ok := s.(*bf.CallExpr)
		if !ok {
			continue
		}
		yr := bf.Rule{Call: y}
		if xname == yr.Name() {
			nameMatches = append(nameMatches, matchInfo{yr, i})
		}
		if xkind == yr.Kind() {
			kindMatches = append(kindMatches, matchInfo{yr, i})
		}
	}
	// A unique name match wins, but only if the kinds agree.
	if len(nameMatches) == 1 {
		if ykind := nameMatches[0].rule.Kind(); xkind != ykind {
			return nil, -1, fmt.Errorf("could not merge %s(%s): a rule of the same name has kind %s", xkind, xname, ykind)
		}
		return nameMatches[0].rule.Call, nameMatches[0].index, nil
	}
	if len(nameMatches) > 1 {
		// Fix: the original format string had two verbs (%s(%s)) but only
		// one argument (xname), producing a %!s(MISSING) in the message.
		return nil, -1, fmt.Errorf("could not merge %s(%s): multiple rules have the same name", xkind, xname)
	}
	// Fall back to attribute matching among same-kind rules, in the order
	// attributes are listed in matchAttrs.
	attrs := matchAttrs[xr.Kind()]
	for _, key := range attrs {
		var attrMatches []matchInfo
		xvalue := xr.AttrString(key)
		if xvalue == "" {
			continue
		}
		for _, m := range kindMatches {
			if xvalue == m.rule.AttrString(key) {
				attrMatches = append(attrMatches, m)
			}
		}
		if len(attrMatches) == 1 {
			return attrMatches[0].rule.Call, attrMatches[0].index, nil
		} else if len(attrMatches) > 1 {
			return nil, -1, fmt.Errorf("could not merge %s(%s): multiple rules have the same attribute %s = %q", xkind, xname, key, xvalue)
		}
	}
	// Last resort: kinds in matchAny match a unique rule of the same kind.
	if matchAny[xkind] {
		if len(kindMatches) == 1 {
			return kindMatches[0].rule.Call, kindMatches[0].index, nil
		} else if len(kindMatches) > 1 {
			return nil, -1, fmt.Errorf("could not merge %s(%s): multiple rules have the same kind but different names", xkind, xname)
		}
	}
	return nil, -1, nil
}
// kind returns the rule kind of a call expression.
func kind(c *bf.CallExpr) string {
	r := bf.Rule{Call: c}
	return r.Kind()
}
// name returns the value of the "name" attribute of a call expression.
func name(c *bf.CallExpr) string {
	r := bf.Rule{Call: c}
	return r.Name()
}
// isRuleEmpty returns true if a rule cannot be built because it has no sources,
// dependencies, or embeds after merging. This is based on a per-kind whitelist
// of attributes. Other attributes, like "name" and "visibility" don't affect
// emptiness. Always returns false for expressions that aren't in the known
// set of rules.
func isRuleEmpty(e bf.Expr) bool {
	call, ok := e.(*bf.CallExpr)
	if !ok {
		return false
	}
	rule := bf.Rule{Call: call}
	contentAttrs := nonEmptyAttrs[rule.Kind()]
	if contentAttrs == nil {
		// Unknown kind: never consider it empty.
		return false
	}
	for _, key := range rule.AttrKeys() {
		if contentAttrs[key] {
			return false
		}
	}
	return true
}
// isScalar reports whether e is a string or literal expression, i.e. a value
// that mergeExpr treats as atomic.
func isScalar(e bf.Expr) bool {
	switch e.(type) {
	case *bf.StringExpr, *bf.LiteralExpr:
		return true
	}
	return false
}
// stringValue returns the value of a string expression, or "" if e is not
// a string expression.
func stringValue(e bf.Expr) string {
	if s, ok := e.(*bf.StringExpr); ok {
		return s.Value
	}
	return ""
}
// deleteIndices copies a list, dropping elements at deletedIndices.
// deletedIndices must be sorted.
func deleteIndices(stmt []bf.Expr, deletedIndices []int) []bf.Expr {
	result := make([]bf.Expr, 0, len(stmt)-len(deletedIndices))
	next := 0
	for i, s := range stmt {
		// Advance through deletedIndices in lockstep with i; both are ordered.
		if next < len(deletedIndices) && deletedIndices[next] == i {
			next++
			continue
		}
		result = append(result, s)
	}
	return result
}

View File

@ -0,0 +1,34 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

# Vendored bazel-gazelle "packages" library.
go_library(
    name = "go_default_library",
    srcs = [
        "doc.go",
        "fileinfo.go",
        "fileinfo_go.go",
        "fileinfo_proto.go",
        "package.go",
        "walk.go",
    ],
    importpath = "github.com/bazelbuild/bazel-gazelle/internal/packages",
    visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
    deps = [
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
        "//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
    ],
)

# Source filegroups; the "automanaged" tag marks them as tool-maintained.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,17 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package packages provides Go package traversal in a Bazel repository.
package packages

View File

@ -0,0 +1,403 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package packages
import (
"bufio"
"log"
"os"
"path"
"path/filepath"
"strings"
"github.com/bazelbuild/bazel-gazelle/internal/config"
)
// fileInfo holds information used to decide how to build a file. This
// information comes from the file's name, from package and import declarations
// (in .go files), and from +build and cgo comments.
type fileInfo struct {
	// path is dir joined with name (see fileNameInfo). name is the base file
	// name and ext its extension, including the dot. rel is presumably the
	// package directory relative to the repository root — confirm with callers.
	path, rel, name, ext string
	// packageName is the Go package name of a .go file, without the
	// "_test" suffix if it was present. It is empty for non-Go files.
	packageName string
	// importPath is the canonical import path for this file's package.
	// This may be read from a package comment (in Go) or a go_package
	// option (in proto). This field is empty for files that don't specify
	// an import path.
	importPath string
	// category is the type of file, based on extension.
	category extCategory
	// isTest is true if the file stem (the part before the extension)
	// ends with "_test.go". This is never true for non-Go files.
	isTest bool
	// isXTest is true for test Go files whose declared package name ends
	// with "_test".
	isXTest bool
	// imports is a list of packages imported by a file. It does not include
	// "C" or anything from the standard library.
	imports []string
	// isCgo is true for .go files that import "C".
	isCgo bool
	// goos and goarch contain the OS and architecture suffixes in the filename,
	// if they were present.
	goos, goarch string
	// tags is a list of build tag lines. Each entry is the trimmed text of
	// a line after a "+build" prefix.
	tags []tagLine
	// copts and clinkopts contain flags that are part of CFLAGS, CPPFLAGS,
	// CXXFLAGS, and LDFLAGS directives in cgo comments.
	copts, clinkopts []taggedOpts
	// hasServices indicates whether a .proto file has service definitions.
	hasServices bool
}
// tagLine represents the space-separated disjunction of build tag groups
// in a line comment.
type tagLine []tagGroup
// check returns true if at least one of the tag groups is satisfied.
// An empty line is never satisfied.
func (l tagLine) check(c *config.Config, os, arch string) bool {
	if len(l) == 0 {
		return false
	}
	for _, group := range l {
		if group.check(c, os, arch) {
			return true
		}
	}
	return false
}
// tagGroup represents a comma-separated conjunction of build tags.
type tagGroup []string
// check returns true if all of the tags are true. Tags that start with
// "!" are negated (but "!!" is not allowed). Go release tags (e.g., "go1.8")
// are ignored. If the group contains an os or arch tag, but the os or arch
// parameters are empty, check returns false even if the tag is negated.
func (g tagGroup) check(c *config.Config, os, arch string) bool {
	for _, t := range g {
		if strings.HasPrefix(t, "!!") { // bad syntax, reject always
			return false
		}
		not := strings.HasPrefix(t, "!")
		if not {
			t = t[1:]
		}
		if isIgnoredTag(t) {
			// Release tags are treated as "unknown" and are considered true,
			// whether or not they are negated.
			continue
		}
		var match bool
		if _, ok := config.KnownOSSet[t]; ok {
			if os == "" {
				return false
			}
			match = os == t
		} else if _, ok := config.KnownArchSet[t]; ok {
			if arch == "" {
				return false
			}
			match = arch == t
		} else {
			// Unknown tags are looked up in the configured generic tag set.
			match = c.GenericTags[t]
		}
		if not {
			match = !match
		}
		if !match {
			return false
		}
	}
	return true
}
// taggedOpts is a list of compile or link options which should only be applied
// if the given set of build tags are satisfied. These options have already
// been tokenized using the same algorithm that "go build" uses, then joined
// with OptSeparator.
type taggedOpts struct {
	tags tagLine
	opts string
}
// OptSeparator is a special character inserted between options that appeared
// together in a #cgo directive. This allows options to be split, modified,
// and escaped by other packages.
//
// It's important to keep options grouped together in the same string. For
// example, if we have "-framework IOKit" together in a #cgo directive,
// "-framework" shouldn't be treated as a separate string for the purposes of
// sorting and de-duplicating.
const OptSeparator = "\x1D" // ASCII group separator
// extCategory indicates how a file should be treated, based on extension.
// See fileNameInfo for the extension-to-category mapping.
type extCategory int

const (
	// ignoredExt is applied to files which are not part of a build.
	ignoredExt extCategory = iota
	// unsupportedExt is applied to files that we don't support but would be
	// built with "go build".
	unsupportedExt
	// goExt is applied to .go files.
	goExt
	// cExt is applied to C and C++ files.
	cExt
	// hExt is applied to header files. If cgo code is present, these may be
	// C or C++ headers. If not, they are treated as Go assembly headers.
	hExt
	// sExt is applied to Go assembly files, ending with .s.
	sExt
	// csExt is applied to other assembly files, ending with .S. These are built
	// with the C compiler if cgo code is present.
	csExt
	// protoExt is applied to .proto files.
	protoExt
)
// fileNameInfo returns information that can be inferred from the name of
// a file. It does not read data from the file.
func fileNameInfo(dir, rel, name string) fileInfo {
	ext := path.Ext(name)
	// Categorize the file based on extension. Based on go/build.Context.Import.
	var category extCategory
	switch ext {
	case ".go":
		category = goExt
	case ".c", ".cc", ".cpp", ".cxx":
		category = cExt
	case ".h", ".hh", ".hpp", ".hxx":
		category = hExt
	case ".s":
		category = sExt
	case ".S":
		category = csExt
	case ".proto":
		category = protoExt
	case ".m", ".f", ".F", ".for", ".f90", ".swig", ".swigcxx", ".syso":
		category = unsupportedExt
	default:
		category = ignoredExt
	}
	// Determine test, goos, and goarch. This is intended to match the logic
	// in goodOSArchFile in go/build. The stem (name without extension) is
	// split on "_" and suffix components are consumed right to left:
	// first an optional "test", then OS and/or arch.
	var isTest bool
	var goos, goarch string
	l := strings.Split(name[:len(name)-len(ext)], "_")
	if len(l) >= 2 && l[len(l)-1] == "test" {
		// Only .go files can be tests; "_test" in other files is not special.
		isTest = category == goExt
		l = l[:len(l)-1]
	}
	switch {
	case len(l) >= 3 && config.KnownOSSet[l[len(l)-2]] && config.KnownArchSet[l[len(l)-1]]:
		goos = l[len(l)-2]
		goarch = l[len(l)-1]
	case len(l) >= 2 && config.KnownOSSet[l[len(l)-1]]:
		goos = l[len(l)-1]
	case len(l) >= 2 && config.KnownArchSet[l[len(l)-1]]:
		goarch = l[len(l)-1]
	}
	return fileInfo{
		path:     filepath.Join(dir, name),
		rel:      rel,
		name:     name,
		ext:      ext,
		category: category,
		isTest:   isTest,
		goos:     goos,
		goarch:   goarch,
	}
}
// otherFileInfo returns information about a non-.go file. It will parse
// part of the file to determine build tags. If the file can't be read, an
// error will be logged, and partial information will be returned.
func otherFileInfo(dir, rel, name string) fileInfo {
	info := fileNameInfo(dir, rel, name)
	switch info.category {
	case ignoredExt:
		return info
	case unsupportedExt:
		log.Printf("%s: warning: file extension not yet supported", info.path)
		return info
	}
	if tags, err := readTags(info.path); err != nil {
		// Best effort: keep the name-derived info even if the file is unreadable.
		log.Printf("%s: error reading file: %v", info.path, err)
	} else {
		info.tags = tags
	}
	return info
}
// readTags reads and extracts build tags from the block of comments
// and blank lines at the start of a file which is separated from the
// rest of the file by a blank line. Each string in the returned slice
// is the trimmed text of a line after a "+build" prefix.
// Based on go/build.Context.shouldBuild.
func readTags(path string) ([]tagLine, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	scanner := bufio.NewScanner(f)
	// Pass 1: Identify leading run of // comments and blank lines,
	// which must be followed by a blank line.
	// end tracks the length of lines at the last blank line seen, so
	// comments not followed by a blank line are discarded below.
	var lines []string
	end := 0
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line == "" {
			end = len(lines)
			continue
		}
		if strings.HasPrefix(line, "//") {
			lines = append(lines, line[len("//"):])
			continue
		}
		// First non-blank, non-comment line terminates the leading run.
		break
	}
	if err := scanner.Err(); err != nil {
		return nil, err
	}
	lines = lines[:end]
	// Pass 2: Process each line in the run.
	var tagLines []tagLine
	for _, line := range lines {
		fields := strings.Fields(line)
		if len(fields) > 0 && fields[0] == "+build" {
			tagLines = append(tagLines, parseTagsInGroups(fields[1:]))
		}
	}
	return tagLines, nil
}
// parseTagsInGroups converts the space-separated fields of a +build line
// into a tagLine: each field is a comma-separated tagGroup.
func parseTagsInGroups(groups []string) tagLine {
	var line tagLine
	for _, group := range groups {
		line = append(line, tagGroup(strings.Split(group, ",")))
	}
	return line
}
// isOSArchSpecific reports whether a file is constrained to particular
// operating systems and/or architectures, either through its filename
// suffixes or through any OS/arch tag (negated or not) in its build
// constraints or cgo tags.
func isOSArchSpecific(info fileInfo, cgoTags tagLine) (osSpecific, archSpecific bool) {
	osSpecific = info.goos != ""
	archSpecific = info.goarch != ""
	lines := info.tags
	if len(cgoTags) > 0 {
		lines = append(lines, cgoTags)
	}
	for _, line := range lines {
		for _, group := range line {
			for _, tag := range group {
				// Negation is irrelevant here: a negated OS tag still makes
				// the file OS-specific.
				tag = strings.TrimPrefix(tag, "!")
				if _, ok := config.KnownOSSet[tag]; ok {
					osSpecific = true
				}
				if _, ok := config.KnownArchSet[tag]; ok {
					archSpecific = true
				}
			}
		}
	}
	return osSpecific, archSpecific
}
// checkConstraints determines whether build constraints are satisfied on
// a given platform.
//
// The first few arguments describe the platform. c carries the set of
// generic build tags that are true on all platforms. os and arch are the
// platform GOOS and GOARCH strings. If os or arch is empty, checkConstraints
// will return false in the presence of OS and architecture constraints, even
// if they are negated.
//
// The remaining arguments describe the file being tested. All of these may
// be empty or nil. osSuffix and archSuffix are filename suffixes. fileTags
// is a list of tags from +build comments found near the top of the file.
// cgoTags is an extra set of tags in a #cgo directive.
func checkConstraints(c *config.Config, os, arch, osSuffix, archSuffix string, fileTags []tagLine, cgoTags tagLine) bool {
	// Filename suffixes must match the target platform exactly when present.
	if osSuffix != "" && osSuffix != os {
		return false
	}
	if archSuffix != "" && archSuffix != arch {
		return false
	}
	// Every +build line must be satisfied (lines are ANDed together).
	for _, line := range fileTags {
		if !line.check(c, os, arch) {
			return false
		}
	}
	return len(cgoTags) == 0 || cgoTags.check(c, os, arch)
}
// isIgnoredTag returns whether the tag is "cgo" or is a release tag.
// Release tags match the pattern "go[0-9]\.[0-9]+".
// Gazelle won't consider whether an ignored tag is satisfied when evaluating
// build constraints for a file.
func isIgnoredTag(tag string) bool {
	if tag == "cgo" {
		return true
	}
	// A release tag is "go", one digit, a dot, then one or more digits —
	// at least 5 bytes in total.
	if !strings.HasPrefix(tag, "go") || len(tag) < 5 {
		return false
	}
	isDigit := func(b byte) bool { return '0' <= b && b <= '9' }
	if !isDigit(tag[2]) || tag[3] != '.' {
		return false
	}
	for i := 4; i < len(tag); i++ {
		if !isDigit(tag[i]) {
			return false
		}
	}
	return true
}

View File

@ -0,0 +1,275 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package packages
import (
"bytes"
"errors"
"fmt"
"go/ast"
"go/parser"
"go/token"
"log"
"path/filepath"
"strconv"
"strings"
"unicode"
"unicode/utf8"
"github.com/bazelbuild/bazel-gazelle/internal/config"
)
// goFileInfo returns information about a .go file. It will parse part of the
// file to determine the package name, imports, and build constraints.
// If the file can't be read, an error will be logged, and partial information
// will be returned.
// This function is intended to match go/build.Context.Import.
// TODO(#53): extract canonical import path
func goFileInfo(c *config.Config, dir, rel, name string) fileInfo {
	info := fileNameInfo(dir, rel, name)
	fset := token.NewFileSet()
	// Only imports and comments are parsed; function bodies are skipped.
	pf, err := parser.ParseFile(fset, info.path, nil, parser.ImportsOnly|parser.ParseComments)
	if err != nil {
		log.Printf("%s: error reading go file: %v", info.path, err)
		return info
	}
	info.packageName = pf.Name.Name
	if info.isTest && strings.HasSuffix(info.packageName, "_test") {
		// External test package: record the base package name.
		info.isXTest = true
		info.packageName = info.packageName[:len(info.packageName)-len("_test")]
	}
	for _, decl := range pf.Decls {
		d, ok := decl.(*ast.GenDecl)
		if !ok {
			continue
		}
		for _, dspec := range d.Specs {
			spec, ok := dspec.(*ast.ImportSpec)
			if !ok {
				continue
			}
			quoted := spec.Path.Value
			path, err := strconv.Unquote(quoted)
			if err != nil {
				log.Printf("%s: error reading go file: %v", info.path, err)
				continue
			}
			if path == "C" {
				if info.isTest {
					log.Printf("%s: warning: use of cgo in test not supported", info.path)
				}
				info.isCgo = true
				// The cgo comment is the doc attached to the import spec, or
				// to the whole declaration when it imports only "C".
				cg := spec.Doc
				if cg == nil && len(d.Specs) == 1 {
					cg = d.Doc
				}
				if cg != nil {
					if err := saveCgo(&info, cg); err != nil {
						log.Printf("%s: error reading go file: %v", info.path, err)
					}
				}
				continue
			}
			info.imports = append(info.imports, path)
		}
	}
	// Build tags come from the leading comment block, read separately
	// from the file (see readTags).
	tags, err := readTags(info.path)
	if err != nil {
		log.Printf("%s: error reading go file: %v", info.path, err)
		return info
	}
	info.tags = tags
	return info
}
// saveCgo extracts CFLAGS, CPPFLAGS, CXXFLAGS, and LDFLAGS directives
// from a comment above a "C" import. This is intended to match logic in
// go/build.Context.saveCgo.
//
// Each directive's options are appended to info.copts or info.clinkopts,
// joined with OptSeparator and paired with any build-constraint tags that
// preceded the verb. An error is returned for malformed lines, unsafe
// arguments, and unsupported verbs (pkg-config).
func saveCgo(info *fileInfo, cg *ast.CommentGroup) error {
	text := cg.Text()
	for _, line := range strings.Split(text, "\n") {
		orig := line

		// Line is
		//	#cgo [GOOS/GOARCH...] LDFLAGS: stuff
		//
		line = strings.TrimSpace(line)
		if len(line) < 5 || line[:4] != "#cgo" || (line[4] != ' ' && line[4] != '\t') {
			continue
		}

		// Split at colon.
		line = strings.TrimSpace(line[4:])
		i := strings.Index(line, ":")
		if i < 0 {
			return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig)
		}
		line, optstr := strings.TrimSpace(line[:i]), strings.TrimSpace(line[i+1:])

		// Parse tags and verb. The verb is the last field before the colon;
		// everything before it is constraint tags.
		f := strings.Fields(line)
		if len(f) < 1 {
			return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig)
		}
		verb := f[len(f)-1]
		tags := parseTagsInGroups(f[:len(f)-1])

		// Parse options, honoring quotes and escapes.
		opts, err := splitQuoted(optstr)
		if err != nil {
			return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig)
		}
		var ok bool
		for i, opt := range opts {
			// Expand ${SRCDIR} and reject options containing characters that
			// are not shell-safe.
			if opt, ok = expandSrcDir(opt, info.rel); !ok {
				return fmt.Errorf("%s: malformed #cgo argument: %s", info.path, orig)
			}
			opts[i] = opt
		}
		joinedStr := strings.Join(opts, OptSeparator)

		// Add tags to appropriate list.
		switch verb {
		case "CFLAGS", "CPPFLAGS", "CXXFLAGS":
			info.copts = append(info.copts, taggedOpts{tags, joinedStr})
		case "LDFLAGS":
			info.clinkopts = append(info.clinkopts, taggedOpts{tags, joinedStr})
		case "pkg-config":
			return fmt.Errorf("%s: pkg-config not supported: %s", info.path, orig)
		default:
			return fmt.Errorf("%s: invalid #cgo verb: %s", info.path, orig)
		}
	}
	return nil
}
// splitQuoted splits the string s around each instance of one or more consecutive
// white space characters while taking into account quotes and escaping, and
// returns an array of substrings of s or an empty list if s contains only white space.
// Single quotes and double quotes are recognized to prevent splitting within the
// quoted region, and are removed from the resulting substrings. If a quote in s
// isn't closed err will be set and r will have the unclosed argument as the
// last element. The backslash is used for escaping.
//
// For example, the following string:
//
//	a b:"c d" 'e''f' "g\""
//
// Would be parsed as:
//
//	[]string{"a", "b:c d", "ef", `g"`}
//
// Copied from go/build.splitQuoted
func splitQuoted(s string) (r []string, err error) {
	var args []string
	arg := make([]rune, len(s))
	escaped := false
	quoted := false
	quote := '\x00'
	i := 0
	// The loop variable was previously named "rune", shadowing the builtin
	// type identifier; it is named ch here for clarity.
	for _, ch := range s {
		switch {
		case escaped:
			escaped = false
		case ch == '\\':
			escaped = true
			continue
		case quote != '\x00':
			// Inside a quoted region: only the matching close quote is special.
			if ch == quote {
				quote = '\x00'
				continue
			}
		case ch == '"' || ch == '\'':
			quoted = true
			quote = ch
			continue
		case unicode.IsSpace(ch):
			// Whitespace outside quotes ends the current argument, if any.
			if quoted || i > 0 {
				quoted = false
				args = append(args, string(arg[:i]))
				i = 0
			}
			continue
		}
		arg[i] = ch
		i++
	}
	if quoted || i > 0 {
		args = append(args, string(arg[:i]))
	}
	if quote != 0 {
		err = errors.New("unclosed quote")
	} else if escaped {
		err = errors.New("unfinished escaping")
	}
	return args, err
}
// expandSrcDir expands any occurrence of ${SRCDIR}, making sure
// the result is safe for the shell.
//
// Copied from go/build.expandSrcDir
func expandSrcDir(str string, srcdir string) (string, bool) {
	// "\" delimited paths cause safeCgoName to fail
	// so convert native paths with a different delimiter
	// to "/" before starting (eg: on windows).
	srcdir = filepath.ToSlash(srcdir)

	// Spaces are tolerated in ${SRCDIR}, but not anywhere else.
	pieces := strings.Split(str, "${SRCDIR}")
	if len(pieces) < 2 {
		// Nothing to expand; just validate the original string.
		return str, safeCgoName(str, false)
	}
	valid := srcdir == "" || safeCgoName(srcdir, true)
	for _, piece := range pieces {
		if piece != "" && !safeCgoName(piece, false) {
			valid = false
		}
	}
	expanded := strings.Join(pieces, srcdir)
	return expanded, valid && expanded != ""
}
// NOTE: $ is not safe for the shell, but it is allowed here because of linker options like -Wl,$ORIGIN.
// We never pass these arguments to a shell (just to programs we construct argv for), so this should be okay.
// See golang.org/issue/6038.
// The @ is for OS X. See golang.org/issue/13720.
// The % is for Jenkins. See golang.org/issue/16959.
const safeString = "+-.,/0123456789=ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz:$@%"
const safeSpaces = " "

var safeBytes = []byte(safeSpaces + safeString)

// safeCgoName reports whether every ASCII byte of s is in the allowed set
// above. Non-ASCII bytes are passed through unchecked. If spaces is true,
// space characters are also permitted. The empty string is never safe.
//
// Copied from go/build.safeCgoName
func safeCgoName(s string, spaces bool) bool {
	if s == "" {
		return false
	}
	allowed := safeBytes
	if !spaces {
		// Skip the leading space characters of safeBytes.
		allowed = allowed[len(safeSpaces):]
	}
	for _, c := range []byte(s) {
		if c >= utf8.RuneSelf {
			continue
		}
		if bytes.IndexByte(allowed, c) < 0 {
			return false
		}
	}
	return true
}

View File

@ -0,0 +1,138 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package packages
import (
"bytes"
"io/ioutil"
"log"
"path"
"regexp"
"sort"
"strconv"
"strings"
"unicode"
"github.com/bazelbuild/bazel-gazelle/internal/config"
)
// protoRe matches the proto statements Gazelle extracts information from:
// import statements, the package statement, the go_package option, service
// declarations, and line comments (matched so that keywords inside comments
// are not extracted).
var protoRe = buildProtoRegexp()

// Indices of the named capture groups in protoRe, in the order the
// alternatives appear in buildProtoRegexp.
const (
	importSubexpIndex    = 1
	packageSubexpIndex   = 2
	goPackageSubexpIndex = 3
	serviceSubexpIndex   = 4
)
// protoFileInfo returns information about a .proto file: its imports, its
// package (from the package statement or go_package option), and whether it
// declares any services. The file is scanned with a regular expression
// rather than a full proto parser. If the file can't be read, an error is
// logged and partial information is returned.
func protoFileInfo(c *config.Config, dir, rel, name string) fileInfo {
	info := fileNameInfo(dir, rel, name)
	content, err := ioutil.ReadFile(info.path)
	if err != nil {
		log.Printf("%s: error reading proto file: %v", info.path, err)
		return info
	}

	for _, match := range protoRe.FindAllSubmatch(content, -1) {
		switch {
		case match[importSubexpIndex] != nil:
			imp := unquoteProtoString(match[importSubexpIndex])
			info.imports = append(info.imports, imp)

		case match[packageSubexpIndex] != nil:
			pkg := string(match[packageSubexpIndex])
			// Only the first package statement wins; dots become underscores.
			if info.packageName == "" {
				info.packageName = strings.Replace(pkg, ".", "_", -1)
			}

		case match[goPackageSubexpIndex] != nil:
			gopkg := unquoteProtoString(match[goPackageSubexpIndex])
			// If there's no / in the package option, then it's just a
			// simple package name, not a full import path.
			if strings.LastIndexByte(gopkg, '/') == -1 {
				info.packageName = gopkg
			} else {
				// A full import path may carry an explicit package name
				// after a semicolon, e.g. "example.com/foo;bar".
				if i := strings.LastIndexByte(gopkg, ';'); i != -1 {
					info.importPath = gopkg[:i]
					info.packageName = gopkg[i+1:]
				} else {
					info.importPath = gopkg
					info.packageName = path.Base(gopkg)
				}
			}

		case match[serviceSubexpIndex] != nil:
			info.hasServices = true

		default:
			// Comment matched. Nothing to extract.
		}
	}
	sort.Strings(info.imports)

	if info.packageName == "" {
		// No package statement or option: derive a name from the file name,
		// keeping only letters, digits, and underscores.
		stem := strings.TrimSuffix(name, ".proto")
		fs := strings.FieldsFunc(stem, func(r rune) bool {
			return !(unicode.IsLetter(r) || unicode.IsNumber(r) || r == '_')
		})
		info.packageName = strings.Join(fs, "_")
	}

	return info
}
// buildProtoRegexp compiles the regular expression used to scan .proto files
// for import statements, the package statement, the go_package option, and
// service declarations. Line comments are matched as the final alternative
// so keywords inside them are consumed without being extracted.
//
// Based on https://developers.google.com/protocol-buffers/docs/reference/proto3-spec
func buildProtoRegexp() *regexp.Regexp {
	// Fixed: was `[0-9a-fA-f]`, where the A-f range also matched G-Z and
	// the punctuation between Z and a, accepting invalid hex escapes.
	hexEscape := `\\[xX][0-9a-fA-F]{2}`
	octEscape := `\\[0-7]{3}`
	charEscape := `\\[abfnrtv'"\\]`
	charValue := strings.Join([]string{hexEscape, octEscape, charEscape, "[^\x00\\'\\\"\\\\]"}, "|")
	strLit := `'(?:` + charValue + `|")*'|"(?:` + charValue + `|')*"`
	ident := `[A-Za-z][A-Za-z0-9_]*`
	fullIdent := ident + `(?:\.` + ident + `)*`
	importStmt := `\bimport\s*(?:public|weak)?\s*(?P<import>` + strLit + `)\s*;`
	packageStmt := `\bpackage\s*(?P<package>` + fullIdent + `)\s*;`
	goPackageStmt := `\boption\s*go_package\s*=\s*(?P<go_package>` + strLit + `)\s*;`
	serviceStmt := `(?P<service>service)`
	comment := `//[^\n]*`
	protoReSrc := strings.Join([]string{importStmt, packageStmt, goPackageStmt, serviceStmt, comment}, "|")
	return regexp.MustCompile(protoReSrc)
}
// unquoteProtoString interprets a proto string literal (single- or
// double-quoted) and returns its value. It panics if the literal is
// malformed.
func unquoteProtoString(q []byte) string {
	// Adjust quotes so that Unquote is happy. We need a double quoted string
	// without unescaped double quote characters inside.
	pieces := bytes.Split(q[1:len(q)-1], []byte{'"'})
	if len(pieces) > 1 {
		// Escape every interior double quote that isn't already escaped,
		// then re-wrap the whole literal in double quotes.
		for i := 0; i < len(pieces)-1; i++ {
			p := pieces[i]
			if len(p) == 0 || p[len(p)-1] != '\\' {
				pieces[i] = append(p, '\\')
			}
		}
		q = append([]byte{'"'}, bytes.Join(pieces, []byte{'"'})...)
		q = append(q, '"')
	}
	if q[0] == '\'' {
		q[0] = '"'
		q[len(q)-1] = '"'
	}

	s, err := strconv.Unquote(string(q))
	if err != nil {
		log.Panicf("unquoting string literal %s from proto: %v", q, err)
	}
	return s
}

View File

@ -0,0 +1,630 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package packages
import (
"fmt"
"log"
"path"
"sort"
"strings"
"github.com/bazelbuild/bazel-gazelle/internal/config"
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
)
// Package contains metadata about a Go package extracted from a directory.
// It fills a similar role to go/build.Package, but it separates files by
// target instead of by type, and it supports multiple platforms.
type Package struct {
	// Name is the symbol found in package declarations of the .go files in
	// the package. It does not include the "_test" suffix from external tests.
	Name string

	// Dir is an absolute path to the directory that contains the package.
	Dir string

	// Rel is the relative path to the package directory from the repository
	// root. If the directory is the repository root itself, Rel is empty.
	// Components in Rel are separated with slashes.
	Rel string

	// ImportPath is the string used to import this package in Go.
	ImportPath string

	// Library, Binary, Test, and XTest describe the Go targets in the
	// package: the library, the command, the internal test, and the
	// external test.
	Library, Binary, Test, XTest GoTarget

	// Proto describes the proto files in the package.
	Proto ProtoTarget

	// HasTestdata indicates whether the directory has a "testdata"
	// subdirectory.
	HasTestdata bool
}

// GoTarget contains metadata about a buildable Go target in a package.
type GoTarget struct {
	// Sources and Imports are the source file names and imported package
	// paths of the target, grouped by platform.
	Sources, Imports PlatformStrings

	// COpts and CLinkOpts are C compiler and linker options collected from
	// #cgo directives, grouped by platform.
	COpts, CLinkOpts PlatformStrings

	// Cgo indicates whether any source file in the target uses cgo.
	Cgo bool
}

// ProtoTarget contains metadata about proto files in a package.
type ProtoTarget struct {
	// Sources and Imports are the .proto file names and imported proto
	// paths, grouped by platform.
	Sources, Imports PlatformStrings

	// HasServices indicates whether any .proto file declares a service.
	HasServices bool

	// HasPbGo indicates whether unexcluded .pb.go files are present in the
	// same package. They will not be in this target's sources.
	HasPbGo bool
}

// PlatformStrings contains a set of strings associated with a buildable
// Go target in a package. This is used to store source file names,
// import paths, and flags.
//
// Strings are stored in four sets: generic strings, OS-specific strings,
// arch-specific strings, and OS-and-arch-specific strings. A string may not
// be duplicated within a list or across sets; however, a string may appear
// in more than one list within a set (e.g., in "linux" and "windows" within
// the OS set). Strings within each list should be sorted, though this may
// not be relied upon.
type PlatformStrings struct {
	// Generic is a list of strings not specific to any platform.
	Generic []string

	// OS is a map from OS name (anything in config.KnownOSs) to
	// OS-specific strings.
	OS map[string][]string

	// Arch is a map from architecture name (anything in config.KnownArchs) to
	// architecture-specific strings.
	Arch map[string][]string

	// Platform is a map from platforms to OS and architecture-specific strings.
	Platform map[config.Platform][]string
}
// IsCommand returns true if the package name is "main".
func (p *Package) IsCommand() bool {
	return p.Name == "main"
}

// EmptyPackage returns an empty package. The package name and import path
// are inferred from the directory name and configuration. This is useful
// for deleting rules in directories which no longer have source files.
func EmptyPackage(c *config.Config, dir, rel string) *Package {
	packageName := pathtools.RelBaseName(rel, c.GoPrefix, c.RepoRoot)
	pb := packageBuilder{
		name: packageName,
		dir:  dir,
		rel:  rel,
	}
	// NOTE(review): inferImportPath's error (reported when rel is the prefix
	// root and the prefix is empty) is ignored here, leaving ImportPath
	// empty — confirm callers tolerate an empty import path.
	pb.inferImportPath(c)
	return pb.build()
}

// HasGo returns whether the target contains at least one .go source file.
func (t *GoTarget) HasGo() bool {
	return t.Sources.HasGo()
}

// HasProto returns whether the target contains any proto sources.
func (t *ProtoTarget) HasProto() bool {
	return !t.Sources.IsEmpty()
}

// HasGo returns whether any of the string lists contains a .go file name.
func (ps *PlatformStrings) HasGo() bool {
	return ps.firstGoFile() != ""
}

// IsEmpty returns whether all four string sets are empty.
func (ps *PlatformStrings) IsEmpty() bool {
	return len(ps.Generic) == 0 && len(ps.OS) == 0 && len(ps.Arch) == 0 && len(ps.Platform) == 0
}
// firstGoFile returns the name of a .go file in any of the string lists,
// or "" if there is none. Generic strings are checked first, then the OS,
// Arch, and Platform sets.
func (ps *PlatformStrings) firstGoFile() string {
	if f := firstGoFileInList(ps.Generic); f != "" {
		return f
	}
	for _, fs := range ps.OS {
		if f := firstGoFileInList(fs); f != "" {
			return f
		}
	}
	for _, fs := range ps.Arch {
		if f := firstGoFileInList(fs); f != "" {
			return f
		}
	}
	for _, fs := range ps.Platform {
		if f := firstGoFileInList(fs); f != "" {
			return f
		}
	}
	return ""
}

// firstGoFileInList returns the first file name in fs with a ".go" suffix,
// or "" if there is none.
func firstGoFileInList(fs []string) string {
	for _, f := range fs {
		if strings.HasSuffix(f, ".go") {
			return f
		}
	}
	return ""
}
// packageBuilder accumulates state about a package while the files in its
// directory are processed. Call build to produce the finished Package.
type packageBuilder struct {
	name, dir, rel               string
	library, binary, test, xtest goTargetBuilder
	proto                        protoTargetBuilder
	hasTestdata                  bool
	// importPath is an explicit import path found in a source file (e.g. a
	// go_package option); importPathFile is the file it came from, kept for
	// error reporting when two files disagree.
	importPath, importPathFile string
}

// goTargetBuilder accumulates the sources, imports, and cgo options of a
// single Go target.
type goTargetBuilder struct {
	sources, imports, copts, clinkopts platformStringsBuilder
	cgo                                bool
}

// protoTargetBuilder accumulates the sources and imports of the proto files
// in a package.
type protoTargetBuilder struct {
	sources, imports     platformStringsBuilder
	hasServices, hasPbGo bool
}

// platformStringsBuilder accumulates strings keyed by the platform
// constraints under which each string applies.
type platformStringsBuilder struct {
	strs map[string]platformStringInfo
}

// platformStringInfo records which platforms a single string applies to.
// Depending on set, at most one of oss, archs, or platforms is populated.
type platformStringInfo struct {
	set       platformStringSet
	oss       map[string]bool
	archs     map[string]bool
	platforms map[config.Platform]bool
}

// platformStringSet identifies the kind of constraint on a string: none
// (generic), OS-only, arch-only, or full OS/arch platform pairs.
type platformStringSet int

const (
	genericSet platformStringSet = iota
	osSet
	archSet
	platformSet
)
// addFile adds the file described by "info" to a target in the package "p" if
// the file is buildable.
//
// "cgo" tells whether any ".go" file in the package contains cgo code. This
// affects whether C files are added to targets.
//
// An error is returned if a file is buildable but invalid (for example, a
// test .go file containing cgo code). Files that are not buildable will not
// be added to any target (for example, .txt files).
func (pb *packageBuilder) addFile(c *config.Config, info fileInfo, cgo bool) error {
	switch {
	case info.category == ignoredExt || info.category == unsupportedExt ||
		!cgo && (info.category == cExt || info.category == csExt) ||
		c.ProtoMode == config.DisableProtoMode && info.category == protoExt:
		// Not buildable under the current configuration; silently skipped.
		return nil
	case info.isXTest:
		if info.isCgo {
			return fmt.Errorf("%s: use of cgo in test not supported", info.path)
		}
		pb.xtest.addFile(c, info)
	case info.isTest:
		if info.isCgo {
			return fmt.Errorf("%s: use of cgo in test not supported", info.path)
		}
		pb.test.addFile(c, info)
	case info.category == protoExt:
		pb.proto.addFile(c, info)
	default:
		pb.library.addFile(c, info)
	}
	if strings.HasSuffix(info.name, ".pb.go") {
		pb.proto.hasPbGo = true
	}

	// Record an explicit import path declared by the file, and report a
	// conflict if two files disagree.
	if info.importPath != "" {
		if pb.importPath == "" {
			pb.importPath = info.importPath
			pb.importPathFile = info.path
		} else if pb.importPath != info.importPath {
			return fmt.Errorf("found import comments %q (%s) and %q (%s)", pb.importPath, pb.importPathFile, info.importPath, info.path)
		}
	}

	return nil
}
// isBuildable returns true if anything in the package is buildable.
// This is true if the package has Go code that satisfies build constraints
// on any platform or has proto files not in legacy mode.
func (pb *packageBuilder) isBuildable(c *config.Config) bool {
	if pb.firstGoFile() != "" {
		return true
	}
	return c.ProtoMode == config.DefaultProtoMode && len(pb.proto.sources.strs) > 0
}
// firstGoFile returns the name of a .go file if the package contains at least
// one .go file, or "" otherwise.
func (pb *packageBuilder) firstGoFile() string {
	goSrcs := []platformStringsBuilder{
		pb.library.sources,
		pb.binary.sources,
		pb.test.sources,
		pb.xtest.sources,
	}
	for _, sb := range goSrcs {
		// Ranging over a nil map is a no-op, so no nil check is needed,
		// and the redundant blank identifier in "for s, _ := range" is gone.
		for s := range sb.strs {
			if strings.HasSuffix(s, ".go") {
				return s
			}
		}
	}
	return ""
}
// inferImportPath computes the package's import path from the configured
// prefix and the package's position relative to the prefix root. It panics
// if an import path has already been set, and returns an error when the
// package sits at the prefix root but the prefix itself is empty.
func (pb *packageBuilder) inferImportPath(c *config.Config) error {
	if pb.importPath != "" {
		log.Panic("importPath already set")
	}
	if pb.rel != c.GoPrefixRel {
		// Join the prefix with the path below the prefix root.
		fromPrefixRel := strings.TrimPrefix(pb.rel, c.GoPrefixRel+"/")
		pb.importPath = path.Join(c.GoPrefix, fromPrefixRel)
		return nil
	}
	if c.GoPrefix == "" {
		return fmt.Errorf("in directory %q, prefix is empty, so importpath would be empty for rules. Set a prefix with a '# gazelle:prefix' comment or with -go_prefix on the command line.", pb.dir)
	}
	pb.importPath = c.GoPrefix
	return nil
}
// build assembles the finished Package from the accumulated builder state.
func (pb *packageBuilder) build() *Package {
	pkg := &Package{
		Name:        pb.name,
		Dir:         pb.dir,
		Rel:         pb.rel,
		ImportPath:  pb.importPath,
		Library:     pb.library.build(),
		Binary:      pb.binary.build(),
		Test:        pb.test.build(),
		XTest:       pb.xtest.build(),
		Proto:       pb.proto.build(),
		HasTestdata: pb.hasTestdata,
	}
	return pkg
}
// addFile records a Go source file in this target: its name, its imports,
// and any cgo compiler/linker options, each under the platform constraints
// that apply to the file.
func (tb *goTargetBuilder) addFile(c *config.Config, info fileInfo) {
	tb.cgo = tb.cgo || info.isCgo
	// add applies the file's own build constraints to each string.
	add := getPlatformStringsAddFunction(c, info, nil)
	add(&tb.sources, info.name)
	add(&tb.imports, info.imports...)
	for _, copts := range info.copts {
		// #cgo directives may carry additional constraint tags of their own.
		optAdd := add
		if len(copts.tags) > 0 {
			optAdd = getPlatformStringsAddFunction(c, info, copts.tags)
		}
		optAdd(&tb.copts, copts.opts)
	}
	for _, clinkopts := range info.clinkopts {
		optAdd := add
		if len(clinkopts.tags) > 0 {
			optAdd = getPlatformStringsAddFunction(c, info, clinkopts.tags)
		}
		optAdd(&tb.clinkopts, clinkopts.opts)
	}
}
// build converts the accumulated builder state into a GoTarget.
func (tb *goTargetBuilder) build() GoTarget {
	var t GoTarget
	t.Sources = tb.sources.build()
	t.Imports = tb.imports.build()
	t.COpts = tb.copts.build()
	t.CLinkOpts = tb.clinkopts.build()
	t.Cgo = tb.cgo
	return t
}

// addFile records a proto source file and its imports, and notes whether
// the file declares any services.
func (tb *protoTargetBuilder) addFile(c *config.Config, info fileInfo) {
	addTo := getPlatformStringsAddFunction(c, info, nil)
	addTo(&tb.sources, info.name)
	addTo(&tb.imports, info.imports...)
	if info.hasServices {
		tb.hasServices = true
	}
}

// build converts the accumulated builder state into a ProtoTarget.
func (tb *protoTargetBuilder) build() ProtoTarget {
	var t ProtoTarget
	t.Sources = tb.sources.build()
	t.Imports = tb.imports.build()
	t.HasServices = tb.hasServices
	t.HasPbGo = tb.hasPbGo
	return t
}
// getPlatformStringsAddFunction returns a function used to add strings to
// a *platformStringsBuilder under the same set of constraints. This is a
// performance optimization to avoid evaluating constraints repeatedly.
func getPlatformStringsAddFunction(c *config.Config, info fileInfo, cgoTags tagLine) func(sb *platformStringsBuilder, ss ...string) {
	isOSSpecific, isArchSpecific := isOSArchSpecific(info, cgoTags)

	switch {
	case !isOSSpecific && !isArchSpecific:
		// Unconstrained: strings either apply everywhere or nowhere.
		if checkConstraints(c, "", "", info.goos, info.goarch, info.tags, cgoTags) {
			return func(sb *platformStringsBuilder, ss ...string) {
				for _, s := range ss {
					sb.addGenericString(s)
				}
			}
		}

	case isOSSpecific && !isArchSpecific:
		// OS-constrained: add the strings under each OS that satisfies the
		// constraints.
		var osMatch []string
		for _, os := range config.KnownOSs {
			if checkConstraints(c, os, "", info.goos, info.goarch, info.tags, cgoTags) {
				osMatch = append(osMatch, os)
			}
		}
		if len(osMatch) > 0 {
			return func(sb *platformStringsBuilder, ss ...string) {
				for _, s := range ss {
					sb.addOSString(s, osMatch)
				}
			}
		}

	case !isOSSpecific && isArchSpecific:
		// Arch-constrained: add the strings under each matching architecture.
		var archMatch []string
		for _, arch := range config.KnownArchs {
			if checkConstraints(c, "", arch, info.goos, info.goarch, info.tags, cgoTags) {
				archMatch = append(archMatch, arch)
			}
		}
		if len(archMatch) > 0 {
			return func(sb *platformStringsBuilder, ss ...string) {
				for _, s := range ss {
					sb.addArchString(s, archMatch)
				}
			}
		}

	default:
		// Constrained on both OS and arch: check every known platform pair.
		var platformMatch []config.Platform
		for _, platform := range config.KnownPlatforms {
			if checkConstraints(c, platform.OS, platform.Arch, info.goos, info.goarch, info.tags, cgoTags) {
				platformMatch = append(platformMatch, platform)
			}
		}
		if len(platformMatch) > 0 {
			return func(sb *platformStringsBuilder, ss ...string) {
				for _, s := range ss {
					sb.addPlatformString(s, platformMatch)
				}
			}
		}
	}

	// Nothing satisfies the constraints; adding becomes a no-op.
	return func(_ *platformStringsBuilder, _ ...string) {}
}
// addGenericString records s as applying to all platforms. A generic entry
// replaces any previous platform-specific entry for the same string.
func (sb *platformStringsBuilder) addGenericString(s string) {
	if sb.strs == nil {
		sb.strs = make(map[string]platformStringInfo)
	}
	sb.strs[s] = platformStringInfo{set: genericSet}
}

// addOSString records s as applying to the given operating systems. If s
// already has arch or platform constraints, the entry is converted to the
// finer-grained platform set before merging.
func (sb *platformStringsBuilder) addOSString(s string, oss []string) {
	if sb.strs == nil {
		sb.strs = make(map[string]platformStringInfo)
	}
	si, ok := sb.strs[s]
	if !ok {
		si.set = osSet
		si.oss = make(map[string]bool)
	}
	switch si.set {
	case genericSet:
		// Already applies everywhere; nothing to add.
		return
	case osSet:
		for _, os := range oss {
			si.oss[os] = true
		}
	default:
		// Arch- or platform-constrained: merge at platform granularity,
		// expanding each OS to all of its known architectures.
		si.convertToPlatforms()
		for _, os := range oss {
			for _, arch := range config.KnownOSArchs[os] {
				si.platforms[config.Platform{OS: os, Arch: arch}] = true
			}
		}
	}
	sb.strs[s] = si
}

// addArchString records s as applying to the given architectures. If s
// already has OS or platform constraints, the entry is converted to the
// finer-grained platform set before merging.
func (sb *platformStringsBuilder) addArchString(s string, archs []string) {
	if sb.strs == nil {
		sb.strs = make(map[string]platformStringInfo)
	}
	si, ok := sb.strs[s]
	if !ok {
		si.set = archSet
		si.archs = make(map[string]bool)
	}
	switch si.set {
	case genericSet:
		// Already applies everywhere; nothing to add.
		return
	case archSet:
		for _, arch := range archs {
			si.archs[arch] = true
		}
	default:
		// OS- or platform-constrained: merge at platform granularity,
		// expanding each arch to all OSes known to support it.
		si.convertToPlatforms()
		for _, arch := range archs {
			for _, os := range config.KnownArchOSs[arch] {
				si.platforms[config.Platform{OS: os, Arch: arch}] = true
			}
		}
	}
	sb.strs[s] = si
}

// addPlatformString records s as applying to the given (OS, arch) platform
// pairs, converting any existing OS- or arch-level entry to platforms first.
func (sb *platformStringsBuilder) addPlatformString(s string, platforms []config.Platform) {
	if sb.strs == nil {
		sb.strs = make(map[string]platformStringInfo)
	}
	si, ok := sb.strs[s]
	if !ok {
		si.set = platformSet
		si.platforms = make(map[config.Platform]bool)
	}
	switch si.set {
	case genericSet:
		// Already applies everywhere; nothing to add.
		return
	default:
		// convertToPlatforms is a no-op when the entry is already a
		// platform set.
		si.convertToPlatforms()
		for _, p := range platforms {
			si.platforms[p] = true
		}
	}
	sb.strs[s] = si
}
// build converts the builder state into a PlatformStrings value whose lists
// are deterministically sorted (map iteration order is random).
func (sb *platformStringsBuilder) build() PlatformStrings {
	var ps PlatformStrings
	for s, si := range sb.strs {
		switch si.set {
		case genericSet:
			ps.Generic = append(ps.Generic, s)
		case osSet:
			if ps.OS == nil {
				ps.OS = make(map[string][]string)
			}
			for os := range si.oss {
				ps.OS[os] = append(ps.OS[os], s)
			}
		case archSet:
			if ps.Arch == nil {
				ps.Arch = make(map[string][]string)
			}
			for arch := range si.archs {
				ps.Arch[arch] = append(ps.Arch[arch], s)
			}
		case platformSet:
			if ps.Platform == nil {
				ps.Platform = make(map[config.Platform][]string)
			}
			for p := range si.platforms {
				ps.Platform[p] = append(ps.Platform[p], s)
			}
		}
	}
	// Sort each list for stable output. Ranging over a nil map is a no-op,
	// so the previous nil guards were redundant.
	sort.Strings(ps.Generic)
	for _, ss := range ps.OS {
		sort.Strings(ss)
	}
	for _, ss := range ps.Arch {
		sort.Strings(ss)
	}
	for _, ss := range ps.Platform {
		sort.Strings(ss)
	}
	return ps
}
// convertToPlatforms rewrites an OS- or arch-constrained entry as an
// equivalent set of full (OS, arch) platform pairs so that entries with
// different constraint kinds can be merged. Platform entries are left
// unchanged; generic entries cannot be converted and cause a panic.
func (si *platformStringInfo) convertToPlatforms() {
	switch si.set {
	case genericSet:
		log.Panic("cannot convert generic string to platforms")
	case platformSet:
		return
	case osSet:
		si.set = platformSet
		si.platforms = make(map[config.Platform]bool)
		for os := range si.oss {
			for _, arch := range config.KnownOSArchs[os] {
				si.platforms[config.Platform{OS: os, Arch: arch}] = true
			}
		}
		si.oss = nil
	case archSet:
		si.set = platformSet
		si.platforms = make(map[config.Platform]bool)
		for arch := range si.archs {
			for _, os := range config.KnownArchOSs[arch] {
				si.platforms[config.Platform{OS: os, Arch: arch}] = true
			}
		}
		si.archs = nil
	}
}
// MapSlice applies a function that processes slices of strings to the strings
// in "ps" and returns a new PlatformStrings with the results. Errors returned
// by f are collected and returned; the slices that produced them are dropped.
func (ps *PlatformStrings) MapSlice(f func([]string) ([]string, error)) (PlatformStrings, []error) {
	// Renamed from "errors" to avoid shadowing the standard library package
	// identifier of the same name.
	var errs []error

	mapSlice := func(ss []string) []string {
		rs, err := f(ss)
		if err != nil {
			errs = append(errs, err)
			return nil
		}
		return rs
	}

	mapStringMap := func(m map[string][]string) map[string][]string {
		if m == nil {
			return nil
		}
		rm := make(map[string][]string)
		for k, ss := range m {
			ss = mapSlice(ss)
			if len(ss) > 0 {
				rm[k] = ss
			}
		}
		if len(rm) == 0 {
			return nil
		}
		return rm
	}

	mapPlatformMap := func(m map[config.Platform][]string) map[config.Platform][]string {
		if m == nil {
			return nil
		}
		rm := make(map[config.Platform][]string)
		for k, ss := range m {
			ss = mapSlice(ss)
			if len(ss) > 0 {
				rm[k] = ss
			}
		}
		if len(rm) == 0 {
			return nil
		}
		return rm
	}

	result := PlatformStrings{
		Generic:  mapSlice(ps.Generic),
		OS:       mapStringMap(ps.OS),
		Arch:     mapStringMap(ps.Arch),
		Platform: mapPlatformMap(ps.Platform),
	}
	return result, errs
}

View File

@ -0,0 +1,460 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package packages
import (
"go/build"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"strings"
"github.com/bazelbuild/bazel-gazelle/internal/config"
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
bf "github.com/bazelbuild/buildtools/build"
)
// A WalkFunc is a callback called by Walk in each visited directory.
//
// dir is the absolute file system path to the directory being visited.
//
// rel is the relative slash-separated path to the directory from the
// repository root. Will be "" for the repository root directory itself.
//
// c is the configuration for the current directory. This may have been
// modified by directives in the directory's build file.
//
// pkg contains information about how to build source code in the directory.
// Will be nil for directories that don't contain buildable code, directories
// that Gazelle was not asked update, directories with an "ignore" directive,
// and directories where Walk encountered errors.
//
// oldFile is the existing build file in the directory. Will be nil if there
// was no file.
//
// isUpdateDir is true for directories that Gazelle was asked to update.
type WalkFunc func(dir, rel string, c *config.Config, pkg *Package, oldFile *bf.File, isUpdateDir bool)
// Walk traverses a directory tree. In each directory, Walk parses existing
// build files. In directories that Gazelle was asked to update (c.Dirs), Walk
// also parses source files and infers build information.
//
// c is the base configuration for the repository. c may be copied and modified
// by directives found in build files.
//
// root is an absolute file path to the directory to traverse.
//
// f is a function that will be called for each visited directory.
func Walk(c *config.Config, root string, f WalkFunc) {
	// Determine relative paths for the directories to be updated.
	var updateRels []string
	for _, dir := range c.Dirs {
		rel, err := filepath.Rel(c.RepoRoot, dir)
		if err != nil {
			// This should have been verified when c was built.
			log.Panicf("%s: not a subdirectory of repository root %q", dir, c.RepoRoot)
		}
		rel = filepath.ToSlash(rel)
		if rel == "." || rel == "/" {
			rel = ""
		}
		updateRels = append(updateRels, rel)
	}
	rootRel, err := filepath.Rel(c.RepoRoot, root)
	if err != nil {
		log.Panicf("%s: not a subdirectory of repository root %q", root, c.RepoRoot)
	}
	if rootRel == "." || rootRel == "/" {
		rootRel = ""
	}

	symlinks := symlinkResolver{root: root, visited: []string{root}}

	// visit walks the directory tree in post-order. It returns whether the
	// given directory or any subdirectory contained a build file or buildable
	// source code. This affects whether "testdata" directories are considered
	// data dependencies.
	var visit func(*config.Config, string, string, bool, []string) bool
	visit = func(c *config.Config, dir, rel string, isUpdateDir bool, excluded []string) bool {
		// Check if this directory should be updated.
		if !isUpdateDir {
			for _, updateRel := range updateRels {
				if pathtools.HasPrefix(rel, updateRel) {
					isUpdateDir = true
				}
			}
		}

		// Look for an existing BUILD file. Multiple build files or parse
		// failures are logged and mark the directory as errored.
		var oldFile *bf.File
		haveError := false
		for _, base := range c.ValidBuildFileNames {
			oldPath := filepath.Join(dir, base)
			st, err := os.Stat(oldPath)
			if os.IsNotExist(err) || err == nil && st.IsDir() {
				continue
			}
			oldData, err := ioutil.ReadFile(oldPath)
			if err != nil {
				log.Print(err)
				haveError = true
				continue
			}
			if oldFile != nil {
				log.Printf("in directory %s, multiple Bazel files are present: %s, %s",
					dir, filepath.Base(oldFile.Path), base)
				haveError = true
				continue
			}
			oldFile, err = bf.Parse(oldPath, oldData)
			if err != nil {
				log.Print(err)
				haveError = true
				continue
			}
		}

		// Process directives in the build file. If this is a vendor directory,
		// set an empty prefix.
		if path.Base(rel) == "vendor" {
			cCopy := *c
			cCopy.GoPrefix = ""
			cCopy.GoPrefixRel = rel
			c = &cCopy
		}
		var directives []config.Directive
		if oldFile != nil {
			directives = config.ParseDirectives(oldFile)
			c = config.ApplyDirectives(c, directives, rel)
		}
		c = config.InferProtoMode(c, rel, oldFile, directives)

		var ignore bool
		for _, d := range directives {
			switch d.Key {
			case "exclude":
				// Exclusions accumulate and propagate to subdirectories.
				excluded = append(excluded, d.Value)
			case "ignore":
				ignore = true
			}
		}

		// List files and subdirectories.
		files, err := ioutil.ReadDir(dir)
		if err != nil {
			log.Print(err)
			return false
		}
		if c.ProtoMode == config.DefaultProtoMode {
			excluded = append(excluded, findPbGoFiles(files, excluded)...)
		}

		var pkgFiles, otherFiles, subdirs []string
		for _, f := range files {
			base := f.Name()
			switch {
			case base == "" || base[0] == '.' || base[0] == '_' || isExcluded(excluded, base):
				// Hidden, underscore-prefixed, and excluded entries are skipped.
				continue

			case f.IsDir():
				subdirs = append(subdirs, base)

			case strings.HasSuffix(base, ".go") ||
				(c.ProtoMode != config.DisableProtoMode && strings.HasSuffix(base, ".proto")):
				pkgFiles = append(pkgFiles, base)

			case f.Mode()&os.ModeSymlink != 0 && symlinks.follow(dir, base):
				// Symlinks to directories are walked when the resolver
				// decides they should be followed.
				subdirs = append(subdirs, base)

			default:
				otherFiles = append(otherFiles, base)
			}
		}

		// Recurse into subdirectories.
		hasTestdata := false
		subdirHasPackage := false
		for _, sub := range subdirs {
			subdirExcluded := excludedForSubdir(excluded, sub)
			hasPackage := visit(c, filepath.Join(dir, sub), path.Join(rel, sub), isUpdateDir, subdirExcluded)
			if sub == "testdata" && !hasPackage {
				// A testdata directory only counts as data if it contains
				// no buildable packages of its own.
				hasTestdata = true
			}
			subdirHasPackage = subdirHasPackage || hasPackage
		}

		hasPackage := subdirHasPackage || oldFile != nil
		if haveError || !isUpdateDir || ignore {
			// Report the directory without building a package.
			f(dir, rel, c, nil, oldFile, false)
			return hasPackage
		}

		// Build a package from files in this directory.
		var genFiles []string
		if oldFile != nil {
			genFiles = findGenFiles(oldFile, excluded)
		}
		pkg := buildPackage(c, dir, rel, pkgFiles, otherFiles, genFiles, hasTestdata)
		f(dir, rel, c, pkg, oldFile, true)
		return hasPackage || pkg != nil
	}

	visit(c, root, rootRel, false, nil)
}
// buildPackage reads source files in a given directory and returns a Package
// containing information about those files and how to build them.
//
// If no buildable .go files are found in the directory, nil will be returned.
// If the directory contains multiple buildable packages, the package whose
// name matches the directory base name will be returned. If there is no such
// package or if an error occurs, an error will be logged, and nil will be
// returned.
func buildPackage(c *config.Config, dir, rel string, pkgFiles, otherFiles, genFiles []string, hasTestdata bool) *Package {
	// Process .go and .proto files first, since these determine the package name.
	packageMap := make(map[string]*packageBuilder)
	cgo := false
	var pkgFilesWithUnknownPackage []fileInfo
	for _, f := range pkgFiles {
		var info fileInfo
		switch path.Ext(f) {
		case ".go":
			info = goFileInfo(c, dir, rel, f)
		case ".proto":
			info = protoFileInfo(c, dir, rel, f)
		default:
			// The caller only puts .go and .proto names into pkgFiles, so any
			// other extension here is a programming error.
			log.Panicf("file cannot determine package name: %s", f)
		}
		if info.packageName == "" {
			// The package name could not be determined (e.g., a parse or I/O
			// error). These files are re-added to the selected package below.
			pkgFilesWithUnknownPackage = append(pkgFilesWithUnknownPackage, info)
			continue
		}
		if info.packageName == "documentation" {
			// go/build ignores this package
			continue
		}
		cgo = cgo || info.isCgo
		if _, ok := packageMap[info.packageName]; !ok {
			packageMap[info.packageName] = &packageBuilder{
				name:        info.packageName,
				dir:         dir,
				rel:         rel,
				hasTestdata: hasTestdata,
			}
		}
		if err := packageMap[info.packageName].addFile(c, info, false); err != nil {
			log.Print(err)
		}
	}

	// Select a package to generate rules for.
	pkg, err := selectPackage(c, dir, packageMap)
	if err != nil {
		if _, ok := err.(*build.NoGoError); !ok {
			// NoGoError is the normal case for directories without Go code;
			// anything else is worth reporting.
			log.Print(err)
		}
		return nil
	}

	// Add files with unknown packages. This happens when there are parse
	// or I/O errors. We should keep the file in the srcs list and let the
	// compiler deal with the error.
	for _, info := range pkgFilesWithUnknownPackage {
		if err := pkg.addFile(c, info, cgo); err != nil {
			log.Print(err)
		}
	}

	// Process the other static files.
	for _, file := range otherFiles {
		info := otherFileInfo(dir, rel, file)
		if err := pkg.addFile(c, info, cgo); err != nil {
			log.Print(err)
		}
	}

	// Process generated files. Note that generated files may have the same names
	// as static files. Bazel will use the generated files, but we will look at
	// the content of static files, assuming they will be the same.
	staticFiles := make(map[string]bool)
	for _, f := range pkgFiles {
		staticFiles[f] = true
	}
	for _, f := range otherFiles {
		staticFiles[f] = true
	}
	for _, f := range genFiles {
		if staticFiles[f] {
			continue
		}
		info := fileNameInfo(dir, rel, f)
		if err := pkg.addFile(c, info, cgo); err != nil {
			log.Print(err)
		}
	}

	if pkg.importPath == "" {
		// No file declared an import path; derive one from go_prefix and rel.
		if err := pkg.inferImportPath(c); err != nil {
			log.Print(err)
			return nil
		}
	}
	return pkg.build()
}
// selectPackage picks the single package in dir that rules should be
// generated for. If exactly one buildable package exists, it wins. Otherwise
// the package whose name matches defaultPackageName(c, dir) is chosen. When
// there is no buildable package, *build.NoGoError is returned; when the
// choice is ambiguous, *build.MultiplePackageError is returned.
func selectPackage(c *config.Config, dir string, packageMap map[string]*packageBuilder) (*packageBuilder, error) {
	candidates := make(map[string]*packageBuilder)
	for pkgName, builder := range packageMap {
		if builder.isBuildable(c) {
			candidates[pkgName] = builder
		}
	}

	switch len(candidates) {
	case 0:
		return nil, &build.NoGoError{Dir: dir}
	case 1:
		for _, builder := range candidates {
			return builder, nil
		}
	}

	if builder, ok := candidates[defaultPackageName(c, dir)]; ok {
		return builder, nil
	}

	multiErr := &build.MultiplePackageError{Dir: dir}
	for pkgName, builder := range candidates {
		// Add the first file for each package for the error message.
		// Error() method expects these lists to be the same length. File
		// lists must be non-empty. These lists are only created by
		// buildPackage for packages with .go files present.
		multiErr.Packages = append(multiErr.Packages, pkgName)
		multiErr.Files = append(multiErr.Files, builder.firstGoFile())
	}
	return nil, multiErr
}
// defaultPackageName returns the package name preferred for dir when several
// packages are present: the directory's base name, or, at the repository
// root, the last component of go_prefix.
func defaultPackageName(c *config.Config, dir string) string {
	if dir != c.RepoRoot {
		return filepath.Base(dir)
	}
	if base := path.Base(c.GoPrefix); base != "." && base != "/" {
		return base
	}
	// go_prefix was empty or consisted only of slashes.
	return "unnamed"
}
// findGenFiles returns the names of generated files declared by "out" and
// "outs" attributes of rules in f, with excluded names filtered out.
func findGenFiles(f *bf.File, excluded []string) []string {
	var candidates []string
	for _, r := range f.Rules("") {
		for _, attr := range []string{"out", "outs"} {
			switch value := r.Attr(attr).(type) {
			case *bf.StringExpr:
				candidates = append(candidates, value.Value)
			case *bf.ListExpr:
				for _, item := range value.List {
					if str, ok := item.(*bf.StringExpr); ok {
						candidates = append(candidates, str.Value)
					}
				}
			}
		}
	}

	var kept []string
	for _, name := range candidates {
		if !isExcluded(excluded, name) {
			kept = append(kept, name)
		}
	}
	return kept
}
// findPbGoFiles returns the ".pb.go" file name corresponding to each
// non-excluded ".proto" file in files.
func findPbGoFiles(files []os.FileInfo, excluded []string) []string {
	const protoExt = ".proto"
	var pbGo []string
	for _, f := range files {
		name := f.Name()
		if !strings.HasSuffix(name, protoExt) || isExcluded(excluded, name) {
			continue
		}
		pbGo = append(pbGo, strings.TrimSuffix(name, protoExt)+".pb.go")
	}
	return pbGo
}
// isExcluded reports whether base appears verbatim in the excluded list.
func isExcluded(excluded []string, base string) bool {
	i := 0
	for i < len(excluded) && excluded[i] != base {
		i++
	}
	return i < len(excluded)
}
// excludedForSubdir rewrites exclusion patterns relative to subdir: a pattern
// of the form "subdir/rest" (with a non-empty rest) becomes "rest"; every
// other pattern is dropped.
func excludedForSubdir(excluded []string, subdir string) []string {
	var rewritten []string
	for _, pattern := range excluded {
		if slash := strings.IndexByte(pattern, '/'); slash >= 0 && slash < len(pattern)-1 && pattern[:slash] == subdir {
			rewritten = append(rewritten, pattern[slash+1:])
		}
	}
	return rewritten
}
// symlinkResolver decides whether symlinks encountered while walking the
// tree rooted at root should be followed. visited records the resolved
// targets of symlinks already followed, so repeated or cyclic traversals
// can be avoided.
type symlinkResolver struct {
	root    string
	visited []string
}
// Decide if symlink dir/base should be followed. Only symlinks that resolve
// to directories not already covered by a visited tree are followed.
func (r *symlinkResolver) follow(dir, base string) bool {
	if dir == r.root && strings.HasPrefix(base, "bazel-") {
		// Links such as bazel-<workspace>, bazel-out, bazel-genfiles are created by
		// Bazel to point to internal build directories.
		return false
	}
	// See if the symlink points to a tree that has been already visited.
	fullpath := filepath.Join(dir, base)
	dest, err := filepath.EvalSymlinks(fullpath)
	if err != nil {
		return false
	}
	if !filepath.IsAbs(dest) {
		// Normalize relative targets against the symlink's directory.
		dest, err = filepath.Abs(filepath.Join(dir, dest))
		if err != nil {
			return false
		}
	}
	for _, p := range r.visited {
		// Containment in either direction would mean walking the same tree
		// twice, so both prefix relationships are rejected.
		if pathtools.HasPrefix(dest, p) || pathtools.HasPrefix(p, dest) {
			return false
		}
	}
	// NOTE(review): dest is recorded even when the Stat below fails;
	// presumably harmless since a failed Stat means the tree isn't walked.
	r.visited = append(r.visited, dest)
	stat, err := os.Stat(fullpath)
	if err != nil {
		return false
	}
	return stat.IsDir()
}

View File

@ -0,0 +1,22 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = ["path.go"],
importpath = "github.com/bazelbuild/bazel-gazelle/internal/pathtools",
visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,63 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package pathtools
import (
"path"
"path/filepath"
"strings"
)
// HasPrefix reports whether the slash-separated path p has the given
// prefix. Unlike strings.HasPrefix, this function respects component
// boundaries, so "/home/foo" is not a prefix of "/home/foobar/baz". An
// empty prefix matches every path.
func HasPrefix(p, prefix string) bool {
	if prefix == "" {
		return true
	}
	if p == prefix {
		return true
	}
	return strings.HasPrefix(p, prefix+"/")
}
// TrimPrefix returns p without the provided prefix. If p doesn't start
// with prefix, it returns p unchanged. Unlike strings.TrimPrefix, this
// function respects component boundaries (assuming slash-separated paths),
// so TrimPrefix("foo/bar", "foo") returns "bar".
func TrimPrefix(p, prefix string) string {
	if prefix == "" {
		return p
	}
	if prefix == p {
		return ""
	}
	return strings.TrimPrefix(p, prefix+"/")
}
// RelBaseName returns the base name for rel, a slash-separated path relative
// to the repository root. If that base is empty-ish, it falls back, in
// order, to the base name of prefix, then of root (the absolute file path
// of the repository root directory), and finally to the literal "root".
func RelBaseName(rel, prefix, root string) string {
	for _, candidate := range []string{path.Base(rel), path.Base(prefix), filepath.Base(root)} {
		if candidate != "." && candidate != "/" {
			return candidate
		}
	}
	return "root"
}

View File

@ -0,0 +1,34 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = [
"dep.go",
"remote.go",
"repo.go",
],
importpath = "github.com/bazelbuild/bazel-gazelle/internal/repos",
visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
deps = [
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/rules:go_default_library",
"//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
"//vendor/github.com/pelletier/go-toml:go_default_library",
"//vendor/golang.org/x/tools/go/vcs:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,55 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package repos
import (
"io/ioutil"
"github.com/bazelbuild/bazel-gazelle/internal/label"
toml "github.com/pelletier/go-toml"
)
// depLockFile mirrors the structure of a dep Gopkg.lock file: a list of
// pinned projects.
type depLockFile struct {
	Projects []depProject `toml:"projects"`
}

// depProject is a single [[projects]] entry in Gopkg.lock. Name is the Go
// import path of the project root, Revision the pinned VCS revision, and
// Source an alternate remote URL (may be empty).
type depProject struct {
	Name     string `toml:"name"`
	Revision string `toml:"revision"`
	Source   string `toml:"source"`
}
// importRepoRulesDep parses a dep Gopkg.lock file and converts each pinned
// project into a Repo.
func importRepoRulesDep(filename string) ([]Repo, error) {
	content, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}

	var lock depLockFile
	if err := toml.Unmarshal(content, &lock); err != nil {
		return nil, err
	}

	var repos []Repo
	for _, project := range lock.Projects {
		repos = append(repos, Repo{
			Name:     label.ImportPathToBazelRepoName(project.Name),
			GoPrefix: project.Name,
			Commit:   project.Revision,
			Remote:   project.Source,
		})
	}
	return repos, nil
}

View File

@ -0,0 +1,327 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package repos
import (
"bytes"
"fmt"
"os/exec"
"path"
"regexp"
"strings"
"sync"
"github.com/bazelbuild/bazel-gazelle/internal/label"
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
"golang.org/x/tools/go/vcs"
)
// UpdateRepo returns an object describing a repository at the most recent
// commit or version tag.
//
// It relies on rc for all repository information. Depending on how the
// RemoteCache was initialized and used earlier, some of that information may
// already be available locally; otherwise it is fetched over the network, so
// this function may be slow.
func UpdateRepo(rc *RemoteCache, importPath string) (Repo, error) {
	rootPath, repoName, err := rc.Root(importPath)
	if err != nil {
		return Repo{}, err
	}
	remoteURL, vcsName, err := rc.Remote(rootPath)
	if err != nil {
		return Repo{}, err
	}
	headCommit, headTag, err := rc.Head(remoteURL, vcsName)
	if err != nil {
		return Repo{}, err
	}
	return Repo{
		Name:     repoName,
		GoPrefix: rootPath,
		Commit:   headCommit,
		Tag:      headTag,
		Remote:   remoteURL,
		VCS:      vcsName,
	}, nil
}
// RemoteCache stores information about external repositories. The cache may
// be initialized with information about known repositories, i.e., those listed
// in the WORKSPACE file and mentioned on the command line. Other information
// is retrieved over the network.
//
// Public methods of RemoteCache may be slow in cases where a network fetch
// is needed. Public methods may be called concurrently.
type RemoteCache struct {
	// RepoRootForImportPath is vcs.RepoRootForImportPath by default. It may
	// be overridden so that tests may avoid accessing the network.
	RepoRootForImportPath func(string, bool) (*vcs.RepoRoot, error)

	// HeadCmd returns the latest commit on the default branch in the given
	// repository. This is used by Head. It may be stubbed out for tests.
	HeadCmd func(remote, vcs string) (string, error)

	// root, remote, and head memoize the results of the Root, Remote, and
	// Head methods, respectively.
	root, remote, head remoteCacheMap
}
// remoteCacheMap is a thread-safe, idempotent cache. It is used to store
// information which should be fetched over the network no more than once.
// This follows the Memo pattern described in The Go Programming Language,
// section 9.7.
type remoteCacheMap struct {
	mu    sync.Mutex // guards cache
	cache map[string]*remoteCacheEntry
}

// remoteCacheEntry holds one memoized result (or error) in a remoteCacheMap.
type remoteCacheEntry struct {
	value interface{}
	err   error

	// ready is nil for entries that were added when the cache was initialized.
	// It is non-nil for other entries. It is closed when an entry is ready,
	// i.e., the operation loading the entry completed.
	ready chan struct{}
}
// rootValue is the cached result type for RemoteCache.Root.
type rootValue struct {
	root, name string
}

// remoteValue is the cached result type for RemoteCache.Remote.
type remoteValue struct {
	remote, vcs string
}

// headValue is the cached result type for RemoteCache.Head.
type headValue struct {
	commit, tag string
}
// NewRemoteCache creates a new RemoteCache pre-populated with a set of known
// repositories. The Root and Remote methods will answer for those
// repositories without touching the network; the Head method always accesses
// the network to learn about new versions.
func NewRemoteCache(knownRepos []Repo) *RemoteCache {
	rc := &RemoteCache{
		RepoRootForImportPath: vcs.RepoRootForImportPath,
		HeadCmd:               defaultHeadCmd,
		root:                  remoteCacheMap{cache: map[string]*remoteCacheEntry{}},
		remote:                remoteCacheMap{cache: map[string]*remoteCacheEntry{}},
		head:                  remoteCacheMap{cache: map[string]*remoteCacheEntry{}},
	}
	for i := range knownRepos {
		known := &knownRepos[i]
		rc.root.cache[known.GoPrefix] = &remoteCacheEntry{
			value: rootValue{root: known.GoPrefix, name: known.Name},
		}
		if known.Remote == "" {
			continue
		}
		rc.remote.cache[known.GoPrefix] = &remoteCacheEntry{
			value: remoteValue{remote: known.Remote, vcs: known.VCS},
		}
	}
	return rc
}
// gopkginPattern matches the repository-root portion of a gopkg.in import
// path, e.g. "gopkg.in/yaml.v2" or "gopkg.in/user/pkg.v1".
var gopkginPattern = regexp.MustCompile("^(gopkg.in/(?:[^/]+/)?[^/]+\\.v\\d+)(?:/|$)")

// knownPrefixes lists hosting prefixes whose repository roots can be derived
// without a network lookup: the root is the prefix plus "missing" additional
// path components.
var knownPrefixes = []struct {
	prefix  string
	missing int
}{
	{prefix: "golang.org/x", missing: 1},
	{prefix: "google.golang.org", missing: 1},
	{prefix: "cloud.google.com", missing: 1},
	{prefix: "github.com", missing: 2},
}
// Root returns the portion of an import path that corresponds to the root
// directory of the repository containing the given import path. For example,
// given "golang.org/x/tools/go/loader", this will return "golang.org/x/tools".
// The workspace name of the repository is also returned. This may be a custom
// name set in WORKSPACE, or it may be a generated name based on the root path.
func (r *RemoteCache) Root(importPath string) (root, name string, err error) {
	// Try prefixes of the import path in the cache, but don't actually go out
	// to vcs yet. We do this before handling known special cases because
	// the cache is pre-populated with repository rules, and we want to use their
	// names if we can.
	prefix := importPath
	for {
		v, ok, err := r.root.get(prefix)
		if ok {
			if err != nil {
				return "", "", err
			}
			value := v.(rootValue)
			return value.root, value.name, nil
		}

		// Walk up one path component and try again.
		prefix = path.Dir(prefix)
		if prefix == "." || prefix == "/" {
			break
		}
	}

	// Try known prefixes.
	for _, p := range knownPrefixes {
		if pathtools.HasPrefix(importPath, p.prefix) {
			rest := pathtools.TrimPrefix(importPath, p.prefix)
			var components []string
			if rest != "" {
				components = strings.Split(rest, "/")
			}
			if len(components) < p.missing {
				// Not enough components after the prefix to form a root,
				// e.g. "github.com/user" with no repository name.
				return "", "", fmt.Errorf("import path %q is shorter than the known prefix %q", importPath, p.prefix)
			}
			root = p.prefix
			for _, c := range components[:p.missing] {
				root = path.Join(root, c)
			}
			name = label.ImportPathToBazelRepoName(root)
			return root, name, nil
		}
	}

	// gopkg.in is special, and might have either one or two levels of
	// missing paths. See http://labix.org/gopkg.in for URL patterns.
	if match := gopkginPattern.FindStringSubmatch(importPath); len(match) > 0 {
		root = match[1]
		name = label.ImportPathToBazelRepoName(root)
		return root, name, nil
	}

	// Find the prefix using vcs and cache the result.
	v, err := r.root.ensure(importPath, func() (interface{}, error) {
		res, err := r.RepoRootForImportPath(importPath, false)
		if err != nil {
			return nil, err
		}
		return rootValue{res.Root, label.ImportPathToBazelRepoName(res.Root)}, nil
	})
	if err != nil {
		return "", "", err
	}
	value := v.(rootValue)
	return value.root, value.name, nil
}
// Remote returns the VCS name and the remote URL for a repository with the
// given root import path. This is suitable for creating new repository rules.
// The lookup is memoized per root.
func (r *RemoteCache) Remote(root string) (remote, vcs string, err error) {
	cached, err := r.remote.ensure(root, func() (interface{}, error) {
		rr, err := r.RepoRootForImportPath(root, false)
		if err != nil {
			return nil, err
		}
		return remoteValue{remote: rr.Repo, vcs: rr.VCS.Cmd}, nil
	})
	if err != nil {
		return "", "", err
	}
	rv := cached.(remoteValue)
	return rv.remote, rv.vcs, nil
}
// Head returns the most recent commit id on the default branch and latest
// version tag for the given remote repository. The tag "" is returned if
// no latest version was found.
//
// TODO(jayconrod): support VCS other than git.
// TODO(jayconrod): support version tags. "" is always returned.
func (r *RemoteCache) Head(remote, vcs string) (commit, tag string, err error) {
	if vcs != "git" {
		return "", "", fmt.Errorf("could not locate recent commit in repo %q with unknown version control scheme %q", remote, vcs)
	}

	// Memoized per remote URL, so HeadCmd runs at most once per remote.
	v, err := r.head.ensure(remote, func() (interface{}, error) {
		commit, err := r.HeadCmd(remote, vcs)
		if err != nil {
			return nil, err
		}
		return headValue{commit: commit}, nil
	})
	if err != nil {
		return "", "", err
	}
	value := v.(headValue)
	return value.commit, value.tag, nil
}
// defaultHeadCmd is the default implementation of RemoteCache.HeadCmd. For
// "git" it shells out to "git ls-remote" and returns the commit id of HEAD;
// for "local" it returns an empty commit; any other VCS is an error.
func defaultHeadCmd(remote, vcs string) (string, error) {
	switch vcs {
	case "local":
		return "", nil

	case "git":
		// git ls-remote prints "<commit-id>\t<ref>" lines; everything before
		// the first tab is the commit id of HEAD.
		cmd := exec.Command("git", "ls-remote", "--", remote, "HEAD")
		out, err := cmd.Output()
		if err != nil {
			return "", err
		}
		ix := bytes.IndexByte(out, '\t')
		if ix < 0 {
			return "", fmt.Errorf("could not parse output for git ls-remote for %q", remote)
		}
		return string(out[:ix]), nil

	default:
		return "", fmt.Errorf("unknown version control system: %s", vcs)
	}
}
// get retrieves a value associated with the given key from the cache. ok will
// be true if the key exists in the cache, even if it's in the process of
// being fetched.
func (m *remoteCacheMap) get(key string) (value interface{}, ok bool, err error) {
	m.mu.Lock()
	e, ok := m.cache[key]
	m.mu.Unlock()
	if !ok {
		return nil, ok, nil
	}
	// A nil ready channel marks an entry that was pre-populated at
	// initialization and is complete; otherwise wait for the loader to
	// close the channel.
	if e.ready != nil {
		<-e.ready
	}
	return e.value, ok, e.err
}
// ensure retrieves a value associated with the given key from the cache. If
// the key does not exist in the cache, the load function will be called,
// and its result will be associated with the key. The load function will not
// be called more than once for any key.
func (m *remoteCacheMap) ensure(key string, load func() (interface{}, error)) (interface{}, error) {
	m.mu.Lock()
	e, ok := m.cache[key]
	if !ok {
		// First caller for this key: publish the entry, release the lock,
		// then load outside the lock. Closing ready wakes any waiters.
		e = &remoteCacheEntry{ready: make(chan struct{})}
		m.cache[key] = e
		m.mu.Unlock()
		e.value, e.err = load()
		close(e.ready)
	} else {
		m.mu.Unlock()
		// Another goroutine owns the load; wait for it to finish. Entries
		// pre-populated at initialization have a nil ready channel and are
		// already complete.
		if e.ready != nil {
			<-e.ready
		}
	}
	return e.value, e.err
}

View File

@ -0,0 +1,198 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package repos
import (
"fmt"
"os"
"path/filepath"
"sort"
"strings"
"github.com/bazelbuild/bazel-gazelle/internal/rules"
bf "github.com/bazelbuild/buildtools/build"
)
// Repo describes an external repository rule declared in a Bazel
// WORKSPACE file. Several fields correspond to attributes of the
// go_repository rule emitted by GenerateRule.
type Repo struct {
	// Name is the value of the "name" attribute of the repository rule.
	Name string

	// GoPrefix is the portion of the Go import path for the root of this
	// repository. Usually the same as Remote.
	GoPrefix string

	// Commit is the revision at which a repository is checked out (for example,
	// a Git commit id).
	Commit string

	// Tag is the name of the version at which a repository is checked out.
	Tag string

	// Remote is the URL the repository can be cloned or checked out from.
	Remote string

	// VCS is the version control system used to check out the repository.
	// May also be "http" for HTTP archives.
	VCS string
}
// byName implements sort.Interface, ordering Repos by their Name field.
type byName []Repo

func (s byName) Len() int           { return len(s) }
func (s byName) Less(i, j int) bool { return s[i].Name < s[j].Name }
func (s byName) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
// lockFileFormat enumerates the vendoring-tool lock file formats understood
// by ImportRepoRules.
type lockFileFormat int

const (
	unknownFormat lockFileFormat = iota
	depFormat     // Gopkg.lock
)

// lockFileParsers maps each recognized format to its parser. unknownFormat
// deliberately has no entry.
var lockFileParsers = map[lockFileFormat]func(string) ([]Repo, error){
	depFormat: importRepoRulesDep,
}
// ImportRepoRules reads the lock file of a vendoring tool and returns
// a list of equivalent repository rules that can be merged into a WORKSPACE
// file. The format of the file is inferred from its basename. Currently,
// only Gopkg.lock is supported.
func ImportRepoRules(filename string) ([]bf.Expr, error) {
	format := getLockFileFormat(filename)
	if format == unknownFormat {
		return nil, fmt.Errorf(`%s: unrecognized lock file format. Expected "Gopkg.lock"`, filename)
	}

	parse := lockFileParsers[format]
	repoList, err := parse(filename)
	if err != nil {
		return nil, fmt.Errorf("error parsing %q: %v", filename, err)
	}
	sort.Stable(byName(repoList))

	exprs := make([]bf.Expr, 0, len(repoList))
	for _, r := range repoList {
		exprs = append(exprs, GenerateRule(r))
	}
	return exprs, nil
}
// getLockFileFormat infers the lock file format from the file's base name.
func getLockFileFormat(filename string) lockFileFormat {
	if filepath.Base(filename) == "Gopkg.lock" {
		return depFormat
	}
	return unknownFormat
}
// GenerateRule returns a repository rule for the given repository that can
// be written in a WORKSPACE file. Empty optional attributes (remote, vcs)
// are omitted.
func GenerateRule(repo Repo) bf.Expr {
	attrs := []rules.KeyValue{
		{Key: "name", Value: repo.Name},
		{Key: "commit", Value: repo.Commit},
		{Key: "importpath", Value: repo.GoPrefix},
	}
	optional := []struct{ key, value string }{
		{"remote", repo.Remote},
		{"vcs", repo.VCS},
	}
	for _, opt := range optional {
		if opt.value != "" {
			attrs = append(attrs, rules.KeyValue{Key: opt.key, Value: opt.value})
		}
	}
	return rules.NewRule("go_repository", attrs)
}
// FindExternalRepo attempts to locate the directory where Bazel has fetched
// the external repository with the given name. An error is returned if the
// repository directory cannot be located.
func FindExternalRepo(repoRoot, name string) (string, error) {
	// See https://docs.bazel.build/versions/master/output_directories.html
	// for documentation on Bazel directory layout.
	// We expect the bazel-out symlink in the workspace root directory to point to
	// <output-base>/execroot/<workspace-name>/bazel-out
	// We expect the external repository to be checked out at
	// <output-base>/external/<name>
	// Note that users can change the prefix for most of the Bazel symlinks with
	// --symlink_prefix, but this does not include bazel-out.
	// strings.Join is used instead of filepath.Join because filepath.Join
	// Cleans the path, which would collapse the ".." components before
	// EvalSymlinks below resolves the bazel-out symlink.
	externalPath := strings.Join([]string{repoRoot, "bazel-out", "..", "..", "..", "external", name}, string(os.PathSeparator))
	cleanPath, err := filepath.EvalSymlinks(externalPath)
	if err != nil {
		return "", err
	}
	st, err := os.Stat(cleanPath)
	if err != nil {
		return "", err
	}
	if !st.IsDir() {
		// The path resolved but does not name a directory.
		return "", fmt.Errorf("%s: not a directory", externalPath)
	}
	return cleanPath, nil
}
// ListRepositories extracts metadata about repositories declared in a
// WORKSPACE file.
//
// The set of repositories returned is necessarily incomplete, since we don't
// evaluate the file, and repositories may be declared in macros in other files.
func ListRepositories(workspace *bf.File) []Repo {
	var repos []Repo
	for _, e := range workspace.Stmt {
		call, ok := e.(*bf.CallExpr)
		if !ok {
			continue
		}
		r := bf.Rule{Call: call}
		name := r.Name()
		if name == "" {
			// Rules without a name attribute cannot be referenced; skip them.
			continue
		}
		var repo Repo
		switch r.Kind() {
		case "go_repository":
			// TODO(jayconrod): extract other fields needed by go_repository.
			// Currently, we don't use the result of this function to produce new
			// go_repository rules, so it doesn't matter.
			goPrefix := r.AttrString("importpath")
			revision := r.AttrString("commit")
			remote := r.AttrString("remote")
			vcs := r.AttrString("vcs")
			if goPrefix == "" {
				continue
			}
			repo = Repo{
				Name:     name,
				GoPrefix: goPrefix,
				Commit:   revision,
				Remote:   remote,
				VCS:      vcs,
			}

		// TODO(jayconrod): infer from {new_,}git_repository, {new_,}http_archive,
		// local_repository.

		default:
			continue
		}
		repos = append(repos, repo)
	}

	// TODO(jayconrod): look for directives that describe repositories that
	// aren't declared in the top-level of WORKSPACE (e.g., behind a macro).

	return repos
}

View File

@ -0,0 +1,35 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = [
"index.go",
"resolve.go",
"resolve_external.go",
"resolve_vendored.go",
"std_package_list.go",
],
importpath = "github.com/bazelbuild/bazel-gazelle/internal/resolve",
visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
deps = [
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/repos:go_default_library",
"//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,346 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package resolve
import (
"fmt"
"log"
"path"
"path/filepath"
"strings"
"github.com/bazelbuild/bazel-gazelle/internal/config"
"github.com/bazelbuild/bazel-gazelle/internal/label"
bf "github.com/bazelbuild/buildtools/build"
)
// RuleIndex is a table of rules in a workspace, indexed by label and by
// import path. Used by Resolver to map import paths to labels.
type RuleIndex struct {
	rules    []*ruleRecord
	labelMap map[label.Label]*ruleRecord
	// importMap is built by Finish and is nil until then.
	importMap map[importSpec][]*ruleRecord
}

// ruleRecord contains information about a rule relevant to import indexing.
type ruleRecord struct {
	rule  bf.Rule
	label label.Label
	// lang is the language of the rule itself.
	lang config.Language
	// importedAs lists the import strings under which this rule can be found.
	importedAs []importSpec
	// embedded marks a Go library that is embedded by another Go library with
	// the same import path; such rules are not indexed under their Go import.
	embedded bool
}

// importSpec describes a package to be imported. Language is specified, since
// different languages have different formats for their imports.
type importSpec struct {
	lang config.Language
	imp  string
}
// NewRuleIndex returns an empty RuleIndex, ready to accept rules via
// AddRulesFromFile.
func NewRuleIndex() *RuleIndex {
	ix := new(RuleIndex)
	ix.labelMap = make(map[label.Label]*ruleRecord)
	return ix
}
// AddRulesFromFile adds existing rules to the index from file
// (which must not be nil).
func (ix *RuleIndex) AddRulesFromFile(c *config.Config, file *bf.File) {
	rel, err := filepath.Rel(c.RepoRoot, file.Path)
	if err != nil {
		log.Panicf("file not in repo: %s", file.Path)
	}
	rel = path.Dir(filepath.ToSlash(rel))
	if rel == "." || rel == "/" {
		rel = ""
	}

	for _, stmt := range file.Stmt {
		call, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		ix.addRule(call, c.GoPrefix, rel)
	}
}
// addRule indexes a single rule from a build file in package buildRel.
// Only Go libraries and proto_library rules are recorded; all other kinds
// are ignored. (NOTE(review): goPrefix is accepted but unused here —
// presumably kept for interface symmetry; verify.)
func (ix *RuleIndex) addRule(call *bf.CallExpr, goPrefix, buildRel string) {
	rule := bf.Rule{Call: call}
	record := &ruleRecord{
		rule:  rule,
		label: label.New("", buildRel, rule.Name()),
	}
	if _, ok := ix.labelMap[record.label]; ok {
		// Duplicate labels cannot be indexed unambiguously; keep the first.
		log.Printf("multiple rules found with label %s", record.label)
		return
	}
	kind := rule.Kind()
	switch {
	case isGoLibrary(kind):
		record.lang = config.GoLang
		if imp := rule.AttrString("importpath"); imp != "" {
			record.importedAs = []importSpec{{lang: config.GoLang, imp: imp}}
		}
		// Additional proto imports may be added in Finish.
	case kind == "proto_library":
		record.lang = config.ProtoLang
		for _, s := range findSources(rule, buildRel, ".proto") {
			record.importedAs = append(record.importedAs, importSpec{lang: config.ProtoLang, imp: s})
		}
	default:
		return
	}
	ix.rules = append(ix.rules, record)
	ix.labelMap[record.label] = record
}
// Finish constructs the import index and performs any other necessary indexing
// actions after all rules have been added. This step is necessary because
// a rule may be indexed differently based on what rules are added later.
//
// This function must be called after all AddRulesFromFile calls but before any
// findRuleByImport calls.
func (ix *RuleIndex) Finish() {
	// Mark embedded Go libraries first so buildImportIndex can skip their
	// Go imports.
	ix.skipGoEmbds()
	ix.buildImportIndex()
}
// skipGoEmbds sets the embedded flag on Go library rules that are imported
// by other Go library rules with the same import path. Note that embedded
// rules may still be imported with non-Go imports. For example, a
// go_proto_library may be imported with either a Go import path or a proto
// path. If the library is embedded, only the proto path will be indexed.
func (ix *RuleIndex) skipGoEmbds() {
	for _, r := range ix.rules {
		if !isGoLibrary(r.rule.Kind()) {
			continue
		}
		importpath := r.rule.AttrString("importpath")

		// Collect embed labels from the "embed" list attribute.
		var embedLabels []label.Label
		if embedList, ok := r.rule.Attr("embed").(*bf.ListExpr); ok {
			for _, embedElem := range embedList.List {
				embedStr, ok := embedElem.(*bf.StringExpr)
				if !ok {
					continue
				}
				embedLabel, err := label.Parse(embedStr.Value)
				if err != nil {
					continue
				}
				embedLabels = append(embedLabels, embedLabel)
			}
		}
		// A "library" string attribute is treated like a one-element embed
		// list.
		if libraryStr, ok := r.rule.Attr("library").(*bf.StringExpr); ok {
			if libraryLabel, err := label.Parse(libraryStr.Value); err == nil {
				embedLabels = append(embedLabels, libraryLabel)
			}
		}

		for _, l := range embedLabels {
			embed, ok := ix.findRuleByLabel(l, r.label)
			if !ok {
				continue
			}
			// Only mark the embedded rule when the import paths agree.
			if embed.rule.AttrString("importpath") != importpath {
				continue
			}
			embed.embedded = true
		}
	}
}
// buildImportIndex constructs the map used by findRuleByImport.
func (ix *RuleIndex) buildImportIndex() {
	ix.importMap = make(map[importSpec][]*ruleRecord)
	for _, r := range ix.rules {
		if isGoProtoLibrary(r.rule.Kind()) {
			// go_proto_library rules are also reachable via their proto
			// source paths.
			protoImports := findGoProtoSources(ix, r)
			r.importedAs = append(r.importedAs, protoImports...)
		}
		for _, imp := range r.importedAs {
			if imp.lang == config.GoLang && r.embedded {
				// Embedded Go libraries are only indexed under their non-Go
				// imports (see skipGoEmbds).
				continue
			}
			ix.importMap[imp] = append(ix.importMap[imp], r)
		}
	}
}
// ruleNotFoundError is returned by findRuleByImport when no indexed rule
// provides the requested import.
type ruleNotFoundError struct {
	from label.Label // rule that needed the import
	imp  string      // import string that could not be resolved
}

func (e ruleNotFoundError) Error() string {
	return fmt.Sprintf("no rule found for import %q, needed in %s", e.imp, e.from)
}
// selfImportError is returned by findRuleByImport when the best match for an
// import is the importing rule itself.
type selfImportError struct {
	from label.Label // rule that imported itself
	imp  string      // import string that resolved back to from
}

func (e selfImportError) Error() string {
	return fmt.Sprintf("rule %s imports itself with path %q", e.from, e.imp)
}
// findRuleByLabel returns the rule record indexed under l, made absolute
// relative to the repo and package of from. The boolean result reports
// whether a matching rule was found.
//
// Fix: the original parameter was named "label", shadowing the label
// package inside the body; renamed to l.
func (ix *RuleIndex) findRuleByLabel(l label.Label, from label.Label) (*ruleRecord, bool) {
	l = l.Abs(from.Repo, from.Pkg)
	r, ok := ix.labelMap[l]
	return r, ok
}
// findRuleByImport attempts to resolve an import string to a rule record.
// imp is the import to resolve (which includes the target language). lang is
// the language of the rule with the dependency (for example, in
// go_proto_library, imp will have ProtoLang and lang will be GoLang).
// from is the rule which is doing the dependency. This is used to check
// vendoring visibility and to check for self-imports.
//
// Any number of rules may provide the same import. If no rules provide the
// import, ruleNotFoundError is returned. If a rule imports itself,
// selfImportError is returned. If multiple rules provide the import, this
// function will attempt to choose one based on Go vendoring logic. In
// ambiguous cases, an error is returned.
func (ix *RuleIndex) findRuleByImport(imp importSpec, lang config.Language, from label.Label) (*ruleRecord, error) {
	matches := ix.importMap[imp]
	var bestMatch *ruleRecord
	var bestMatchIsVendored bool
	var bestMatchVendorRoot string
	var matchError error
	for _, m := range matches {
		if m.lang != lang {
			continue
		}
		switch imp.lang {
		case config.GoLang:
			// Apply vendoring logic for Go libraries. A library in a vendor directory
			// is only visible in the parent tree. Vendored libraries supersede
			// non-vendored libraries, and libraries closer to from.Pkg supersede
			// those further up the tree.
			isVendored := false
			vendorRoot := ""
			// Locate the innermost "vendor" component of the match's package
			// path; everything before it is the vendor root.
			parts := strings.Split(m.label.Pkg, "/")
			for i := len(parts) - 1; i >= 0; i-- {
				if parts[i] == "vendor" {
					isVendored = true
					vendorRoot = strings.Join(parts[:i], "/")
					break
				}
			}
			if isVendored && !label.New(m.label.Repo, vendorRoot, "").Contains(from) {
				// vendor directory not visible
				continue
			}
			if bestMatch == nil || isVendored && (!bestMatchIsVendored || len(vendorRoot) > len(bestMatchVendorRoot)) {
				// Current match is better
				bestMatch = m
				bestMatchIsVendored = isVendored
				bestMatchVendorRoot = vendorRoot
				matchError = nil
			} else if (!isVendored && bestMatchIsVendored) || (isVendored && len(vendorRoot) < len(bestMatchVendorRoot)) {
				// Current match is worse
			} else {
				// Match is ambiguous
				matchError = fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", bestMatch.label, m.label, imp.imp, from)
			}
		default:
			// Non-Go imports have no vendoring precedence: any second
			// provider makes the import ambiguous.
			if bestMatch == nil {
				bestMatch = m
			} else {
				matchError = fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", bestMatch.label, m.label, imp.imp, from)
			}
		}
	}
	if matchError != nil {
		return nil, matchError
	}
	if bestMatch == nil {
		return nil, ruleNotFoundError{from, imp.imp}
	}
	if bestMatch.label.Equal(from) {
		return nil, selfImportError{from, imp.imp}
	}
	// A proto import resolved on behalf of a Go rule is upgraded to the Go
	// rule providing the same importpath, when one is indexed.
	if imp.lang == config.ProtoLang && lang == config.GoLang {
		importpath := bestMatch.rule.AttrString("importpath")
		if betterMatch, err := ix.findRuleByImport(importSpec{config.GoLang, importpath}, config.GoLang, from); err == nil {
			return betterMatch, nil
		}
	}
	return bestMatch, nil
}
// findLabelByImport is a convenience wrapper around findRuleByImport that
// returns only the resolved rule's label (or label.NoLabel on error).
func (ix *RuleIndex) findLabelByImport(imp importSpec, lang config.Language, from label.Label) (label.Label, error) {
	r, err := ix.findRuleByImport(imp, lang, from)
	if err != nil {
		return label.NoLabel, err
	}
	return r.label, nil
}
// findGoProtoSources returns proto import specs for the .proto sources of
// the proto rule referenced by r's "proto" attribute. It returns nil when
// the attribute can't be parsed or the referenced rule isn't indexed.
func findGoProtoSources(ix *RuleIndex, r *ruleRecord) []importSpec {
	protoLabel, err := label.Parse(r.rule.AttrString("proto"))
	if err != nil {
		return nil
	}
	protoRule, ok := ix.findRuleByLabel(protoLabel, r.label)
	if !ok {
		return nil
	}
	var specs []importSpec
	for _, src := range findSources(protoRule.rule, protoRule.label.Pkg, ".proto") {
		specs = append(specs, importSpec{lang: config.ProtoLang, imp: src})
	}
	return specs
}
// findSources extracts from r's "srcs" attribute the entries that are plain
// relative labels whose names end in ext, each joined with buildRel (the
// rule's package path). It returns nil when "srcs" is missing or is not a
// list expression.
//
// Fix: the original loop variable was named "label", shadowing the label
// package; renamed to lbl.
func findSources(r bf.Rule, buildRel, ext string) []string {
	srcsList, ok := r.Attr("srcs").(*bf.ListExpr)
	if !ok {
		return nil
	}
	var srcs []string
	for _, srcExpr := range srcsList.List {
		src, ok := srcExpr.(*bf.StringExpr)
		if !ok {
			continue
		}
		lbl, err := label.Parse(src.Value)
		if err != nil || !lbl.Relative || !strings.HasSuffix(lbl.Name, ext) {
			continue
		}
		srcs = append(srcs, path.Join(buildRel, lbl.Name))
	}
	return srcs
}
// isGoLibrary reports whether kind is a rule kind that provides a Go
// library: go_library itself or one of the Go proto library kinds.
func isGoLibrary(kind string) bool {
	if kind == "go_library" {
		return true
	}
	return isGoProtoLibrary(kind)
}
// isGoProtoLibrary reports whether kind is one of the Go proto rule kinds.
func isGoProtoLibrary(kind string) bool {
	switch kind {
	case "go_proto_library", "go_grpc_library":
		return true
	}
	return false
}

View File

@ -0,0 +1,350 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package resolve
import (
"fmt"
"go/build"
"log"
"path"
"strings"
"github.com/bazelbuild/bazel-gazelle/internal/config"
"github.com/bazelbuild/bazel-gazelle/internal/label"
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
"github.com/bazelbuild/bazel-gazelle/internal/repos"
bf "github.com/bazelbuild/buildtools/build"
)
// Resolver resolves import strings in source files (import paths in Go,
// import statements in protos) into Bazel labels.
type Resolver struct {
	c        *config.Config   // configuration (GoPrefix, DepMode, RepoRoot)
	l        *label.Labeler   // produces labels for targets in this repository
	ix       *RuleIndex       // index of known rules, queried per import
	external nonlocalResolver // fallback for imports outside the repo prefix
}
// nonlocalResolver resolves import paths outside of the current repository's
// prefix. Once we have smarter import path resolution, this shouldn't
// be necessary, and we can remove this abstraction.
type nonlocalResolver interface {
	// resolve maps an import path to the label of the library providing it.
	resolve(imp string) (label.Label, error)
}
// NewResolver returns a Resolver. The non-local resolver is chosen by
// c.DepMode: external repositories (backed by the remote cache rc) or the
// vendor directory.
func NewResolver(c *config.Config, l *label.Labeler, ix *RuleIndex, rc *repos.RemoteCache) *Resolver {
	var e nonlocalResolver
	switch c.DepMode {
	case config.ExternalMode:
		e = newExternalResolver(l, rc)
	case config.VendorMode:
		e = newVendoredResolver(l)
	}
	// NOTE(review): if c.DepMode is neither mode, e stays nil and a later
	// call through Resolver.external would panic — confirm DepMode is
	// always validated before this point.
	return &Resolver{
		c:        c,
		l:        l,
		ix:       ix,
		external: e,
	}
}
// ResolveRule copies and modifies a generated rule e by replacing the import
// paths in the "_gazelle_imports" attribute with labels in a "deps"
// attribute. This may be safely called on expressions that aren't Go rules
// (the original expression will be returned). Any existing "deps" attribute
// is deleted, so it may be necessary to merge the result.
func (r *Resolver) ResolveRule(e bf.Expr, pkgRel string) bf.Expr {
	call, ok := e.(*bf.CallExpr)
	if !ok {
		return e
	}
	rule := bf.Rule{Call: call}
	from := label.New("", pkgRel, rule.Name())

	// Pick the resolver matching the rule kind; unknown kinds are returned
	// unmodified.
	var resolve func(imp string, from label.Label) (label.Label, error)
	switch rule.Kind() {
	case "go_library", "go_binary", "go_test":
		resolve = r.resolveGo
	case "proto_library":
		resolve = r.resolveProto
	case "go_proto_library", "go_grpc_library":
		resolve = r.resolveGoProto
	default:
		return e
	}

	// Work on a shallow copy with its own argument list so the caller's
	// expression is not modified when attributes are deleted/added below.
	resolved := *call
	resolved.List = append([]bf.Expr{}, call.List...)
	rule.Call = &resolved

	imports := rule.Attr(config.GazelleImportsKey)
	rule.DelAttr(config.GazelleImportsKey)
	rule.DelAttr("deps")
	deps := mapExprStrings(imports, func(imp string) string {
		label, err := resolve(imp, from)
		if err != nil {
			switch err.(type) {
			case standardImportError, selfImportError:
				// Expected cases: stdlib imports and self-imports simply
				// produce no dependency.
				return ""
			default:
				log.Print(err)
				return ""
			}
		}
		// Use a relative label for dependencies in the same package.
		label.Relative = label.Repo == "" && label.Pkg == pkgRel
		return label.String()
	})
	if deps != nil {
		rule.SetAttr("deps", deps)
	}
	return &resolved
}
// standardImportError is returned by resolveGo for import paths that belong
// to the Go standard library; such imports produce no dependency.
type standardImportError struct {
	imp string // the standard-library import path
}

func (e standardImportError) Error() string {
	return fmt.Sprintf("import path %q is in the standard library", e.imp)
}
// mapExprStrings applies a function f to the strings in e and returns a new
// expression with the results. Scalar strings, lists, dicts, selects, and
// concatenations are supported. When f returns "" for a string, that element
// is dropped; a composite expression whose elements are all dropped becomes
// nil.
//
// Fix: the CallExpr (select) case previously wrote through call.List[0] on a
// shallow struct copy, which aliased — and therefore mutated — the input
// expression's argument list. It now builds a fresh one-element slice.
func mapExprStrings(e bf.Expr, f func(string) string) bf.Expr {
	if e == nil {
		return nil
	}
	switch expr := e.(type) {
	case *bf.StringExpr:
		s := f(expr.Value)
		if s == "" {
			return nil
		}
		ret := *expr
		ret.Value = s
		return &ret

	case *bf.ListExpr:
		var list []bf.Expr
		for _, elem := range expr.List {
			elem = mapExprStrings(elem, f)
			if elem != nil {
				list = append(list, elem)
			}
		}
		if len(list) == 0 && len(expr.List) > 0 {
			// Every element was filtered out; drop the list entirely.
			return nil
		}
		ret := *expr
		ret.List = list
		return &ret

	case *bf.DictExpr:
		var cases []bf.Expr
		isEmpty := true
		for _, kv := range expr.List {
			keyval, ok := kv.(*bf.KeyValueExpr)
			if !ok {
				log.Panicf("unexpected expression in generated imports dict: %#v", kv)
			}
			value := mapExprStrings(keyval.Value, f)
			if value != nil {
				cases = append(cases, &bf.KeyValueExpr{Key: keyval.Key, Value: value})
				// A surviving "//conditions:default" case alone does not
				// make the dict non-empty.
				if key, ok := keyval.Key.(*bf.StringExpr); !ok || key.Value != "//conditions:default" {
					isEmpty = false
				}
			}
		}
		if isEmpty {
			return nil
		}
		ret := *expr
		ret.List = cases
		return &ret

	case *bf.CallExpr:
		// Only select(...) calls with exactly one argument are expected here.
		if x, ok := expr.X.(*bf.LiteralExpr); !ok || x.Token != "select" || len(expr.List) != 1 {
			log.Panicf("unexpected call expression in generated imports: %#v", e)
		}
		arg := mapExprStrings(expr.List[0], f)
		if arg == nil {
			return nil
		}
		call := *expr
		call.List = []bf.Expr{arg} // fresh slice; do not alias expr.List
		return &call

	case *bf.BinaryExpr:
		// Concatenation: map both sides; a side that maps to nil is elided.
		x := mapExprStrings(expr.X, f)
		y := mapExprStrings(expr.Y, f)
		if x == nil {
			return y
		}
		if y == nil {
			return x
		}
		binop := *expr
		binop.X = x
		binop.Y = y
		return &binop

	default:
		log.Panicf("unexpected expression in generated imports: %#v", e)
		return nil
	}
}
// resolveGo resolves an import path from a Go source file to a label.
// from identifies the importing rule; from.Pkg (the package path relative to
// the repository root) is used to resolve relative imports.
func (r *Resolver) resolveGo(imp string, from label.Label) (label.Label, error) {
	// Relative imports ("./x", "../x") are rewritten as absolute paths
	// under the repository's Go prefix.
	if build.IsLocalImport(imp) {
		cleanRel := path.Clean(path.Join(from.Pkg, imp))
		if build.IsLocalImport(cleanRel) {
			// Still relative after joining: the path escapes the repository.
			return label.NoLabel, fmt.Errorf("relative import path %q from %q points outside of repository", imp, from.Pkg)
		}
		imp = path.Join(r.c.GoPrefix, cleanRel)
	}
	if IsStandard(imp) {
		return label.NoLabel, standardImportError{imp}
	}
	// Prefer a rule already in the index; only a "not found" error lets us
	// fall through to the heuristics below.
	if l, err := r.ix.findLabelByImport(importSpec{config.GoLang, imp}, config.GoLang, from); err != nil {
		if _, ok := err.(ruleNotFoundError); !ok {
			return label.NoLabel, err
		}
	} else {
		return l, nil
	}
	// Within the repository prefix, derive the label from the path.
	if pathtools.HasPrefix(imp, r.c.GoPrefix) {
		return r.l.LibraryLabel(pathtools.TrimPrefix(imp, r.c.GoPrefix)), nil
	}
	// Otherwise defer to the external/vendored resolver.
	return r.external.resolve(imp)
}
const (
	// wellKnownPrefix is the directory prefix of Well Known Types proto
	// imports (e.g. "google/protobuf/any.proto").
	wellKnownPrefix = "google/protobuf/"
	// wellKnownGoProtoPkg is the Go package directory containing generated
	// code for the Well Known Types.
	wellKnownGoProtoPkg = "ptypes"
	// descriptorPkg is the Go package directory for descriptor.proto's
	// generated code, which is handled specially in resolveGoProto.
	descriptorPkg = "protoc-gen-go/descriptor"
)
// resolveProto resolves an import statement in a .proto file to a label
// for a proto_library rule.
func (r *Resolver) resolveProto(imp string, from label.Label) (label.Label, error) {
	if !strings.HasSuffix(imp, ".proto") {
		return label.NoLabel, fmt.Errorf("can't import non-proto: %q", imp)
	}
	// Well Known Types resolve to the dedicated proto repository.
	if isWellKnown(imp) {
		name := path.Base(imp[:len(imp)-len(".proto")]) + "_proto"
		return label.New(config.WellKnownTypesProtoRepo, "", name), nil
	}
	// Prefer an indexed proto rule; only "not found" falls through to the
	// path-based guess below.
	if l, err := r.ix.findLabelByImport(importSpec{config.ProtoLang, imp}, config.ProtoLang, from); err != nil {
		if _, ok := err.(ruleNotFoundError); !ok {
			return label.NoLabel, err
		}
	} else {
		return l, nil
	}
	// Fall back to a label derived from the import's directory.
	rel := path.Dir(imp)
	if rel == "." {
		rel = ""
	}
	name := pathtools.RelBaseName(rel, r.c.GoPrefix, r.c.RepoRoot)
	return r.l.ProtoLabel(rel, name), nil
}
// resolveGoProto resolves an import statement in a .proto file to a
// label for a go_library rule that embeds the corresponding go_proto_library.
//
// Fixes relative to the original: the indexed-rule branch returned the
// branch-scoped err (provably nil there) instead of an explicit nil,
// inconsistent with resolveGo; and local variables named "label" shadowed
// the label package (renamed to lbl).
func (r *Resolver) resolveGoProto(imp string, from label.Label) (label.Label, error) {
	if !strings.HasSuffix(imp, ".proto") {
		return label.NoLabel, fmt.Errorf("can't import non-proto: %q", imp)
	}
	stem := imp[:len(imp)-len(".proto")]

	if isWellKnown(stem) {
		// Well Known Type
		base := path.Base(stem)
		if base == "descriptor" {
			// descriptor.proto's generated Go code lives in its own package
			// (descriptorPkg), separate from the other well-known types.
			switch r.c.DepMode {
			case config.ExternalMode:
				lbl := r.l.LibraryLabel(descriptorPkg)
				if r.c.GoPrefix != config.WellKnownTypesGoPrefix {
					lbl.Repo = config.WellKnownTypesGoProtoRepo
				}
				return lbl, nil
			case config.VendorMode:
				pkg := path.Join("vendor", config.WellKnownTypesGoPrefix, descriptorPkg)
				return r.l.LibraryLabel(pkg), nil
			default:
				log.Panicf("unknown external mode: %v", r.c.DepMode)
			}
		}
		switch r.c.DepMode {
		case config.ExternalMode:
			pkg := path.Join(wellKnownGoProtoPkg, base)
			lbl := r.l.LibraryLabel(pkg)
			if r.c.GoPrefix != config.WellKnownTypesGoPrefix {
				lbl.Repo = config.WellKnownTypesGoProtoRepo
			}
			return lbl, nil
		case config.VendorMode:
			pkg := path.Join("vendor", config.WellKnownTypesGoPrefix, wellKnownGoProtoPkg, base)
			return r.l.LibraryLabel(pkg), nil
		default:
			log.Panicf("unknown external mode: %v", r.c.DepMode)
		}
	}

	// Prefer an indexed Go rule that provides this proto; only a "not
	// found" error lets us fall through to the guess below.
	if l, err := r.ix.findLabelByImport(importSpec{config.ProtoLang, imp}, config.GoLang, from); err != nil {
		if _, ok := err.(ruleNotFoundError); !ok {
			return label.NoLabel, err
		}
	} else {
		return l, nil
	}

	// As a fallback, guess the label based on the proto file name. We assume
	// all proto files in a directory belong to the same package, and the
	// package name matches the directory base name. We also assume that protos
	// in the vendor directory must refer to something else in vendor.
	rel := path.Dir(imp)
	if rel == "." {
		rel = ""
	}
	if from.Pkg == "vendor" || strings.HasPrefix(from.Pkg, "vendor/") {
		rel = path.Join("vendor", rel)
	}
	return r.l.LibraryLabel(rel), nil
}
// IsStandard returns whether a package is in the standard library.
// The set is the generated stdPackages table in this package.
func IsStandard(imp string) bool {
	return stdPackages[imp]
}
// isWellKnown reports whether imp is a direct child of the Well Known Types
// directory, i.e. "google/protobuf/<base>" with no further nesting.
func isWellKnown(imp string) bool {
	if !strings.HasPrefix(imp, wellKnownPrefix) {
		return false
	}
	return strings.TrimPrefix(imp, wellKnownPrefix) == path.Base(imp)
}

View File

@ -0,0 +1,59 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package resolve
import (
"github.com/bazelbuild/bazel-gazelle/internal/label"
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
"github.com/bazelbuild/bazel-gazelle/internal/repos"
)
// externalResolver resolves import paths to external repositories. It uses
// vcs to determine the prefix of the import path that corresponds to the root
// of the repository (this will perform a network fetch for unqualified paths).
// The prefix is converted to a Bazel external name repo according to the
// guidelines in http://bazel.io/docs/be/functions.html#workspace. The remaining
// portion of the import path is treated as the package name.
type externalResolver struct {
	l  *label.Labeler     // produces library labels for the package portion
	rc *repos.RemoteCache // caches import path -> repository root lookups
}
// Compile-time check that externalResolver implements nonlocalResolver.
var _ nonlocalResolver = (*externalResolver)(nil)

// newExternalResolver returns an externalResolver backed by the given
// labeler and remote-repository cache.
func newExternalResolver(l *label.Labeler, rc *repos.RemoteCache) *externalResolver {
	return &externalResolver{l: l, rc: rc}
}
// resolve resolves "importPath" into a label, assuming that it is a label in an
// external repository. It also assumes that the external repository follows the
// recommended reverse-DNS form of workspace name as described in
// http://bazel.io/docs/be/functions.html#workspace.
func (r *externalResolver) resolve(importPath string) (label.Label, error) {
	// Root determines the repository-root prefix of the import path and the
	// corresponding Bazel repo name (per the type comment above, this may
	// involve a network fetch for unqualified paths).
	prefix, repo, err := r.rc.Root(importPath)
	if err != nil {
		return label.NoLabel, err
	}
	// The part of the path below the repository root is the package.
	var pkg string
	if importPath != prefix {
		pkg = pathtools.TrimPrefix(importPath, prefix)
	}
	l := r.l.LibraryLabel(pkg)
	l.Repo = repo
	return l, nil
}

View File

@ -0,0 +1,35 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package resolve
import (
"github.com/bazelbuild/bazel-gazelle/internal/label"
)
// vendoredResolver resolves external packages as packages in vendor/.
type vendoredResolver struct {
	l *label.Labeler // produces library labels for vendored packages
}

// Compile-time check that vendoredResolver implements nonlocalResolver.
var _ nonlocalResolver = (*vendoredResolver)(nil)

// newVendoredResolver returns a vendoredResolver using the given labeler.
func newVendoredResolver(l *label.Labeler) *vendoredResolver {
	return &vendoredResolver{l}
}

// resolve maps importpath to a library label under the vendor/ directory.
func (v *vendoredResolver) resolve(importpath string) (label.Label, error) {
	return v.l.LibraryLabel("vendor/" + importpath), nil
}

View File

@ -0,0 +1,273 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Generated by gen_std_package_list.go
// DO NOT EDIT
package resolve
var stdPackages = map[string]bool{
"archive/tar": true,
"archive/zip": true,
"bufio": true,
"bytes": true,
"cmd/addr2line": true,
"cmd/api": true,
"cmd/asm": true,
"cmd/asm/internal/arch": true,
"cmd/asm/internal/asm": true,
"cmd/asm/internal/flags": true,
"cmd/asm/internal/lex": true,
"cmd/cgo": true,
"cmd/compile": true,
"cmd/compile/internal/amd64": true,
"cmd/compile/internal/arm": true,
"cmd/compile/internal/arm64": true,
"cmd/compile/internal/gc": true,
"cmd/compile/internal/mips": true,
"cmd/compile/internal/mips64": true,
"cmd/compile/internal/ppc64": true,
"cmd/compile/internal/s390x": true,
"cmd/compile/internal/ssa": true,
"cmd/compile/internal/syntax": true,
"cmd/compile/internal/test": true,
"cmd/compile/internal/types": true,
"cmd/compile/internal/x86": true,
"cmd/cover": true,
"cmd/dist": true,
"cmd/doc": true,
"cmd/fix": true,
"cmd/go": true,
"cmd/go/internal/base": true,
"cmd/go/internal/bug": true,
"cmd/go/internal/buildid": true,
"cmd/go/internal/cfg": true,
"cmd/go/internal/clean": true,
"cmd/go/internal/cmdflag": true,
"cmd/go/internal/doc": true,
"cmd/go/internal/envcmd": true,
"cmd/go/internal/fix": true,
"cmd/go/internal/fmtcmd": true,
"cmd/go/internal/generate": true,
"cmd/go/internal/get": true,
"cmd/go/internal/help": true,
"cmd/go/internal/list": true,
"cmd/go/internal/load": true,
"cmd/go/internal/run": true,
"cmd/go/internal/str": true,
"cmd/go/internal/test": true,
"cmd/go/internal/tool": true,
"cmd/go/internal/version": true,
"cmd/go/internal/vet": true,
"cmd/go/internal/web": true,
"cmd/go/internal/work": true,
"cmd/gofmt": true,
"cmd/internal/bio": true,
"cmd/internal/browser": true,
"cmd/internal/dwarf": true,
"cmd/internal/gcprog": true,
"cmd/internal/goobj": true,
"cmd/internal/obj": true,
"cmd/internal/obj/arm": true,
"cmd/internal/obj/arm64": true,
"cmd/internal/obj/mips": true,
"cmd/internal/obj/ppc64": true,
"cmd/internal/obj/s390x": true,
"cmd/internal/obj/x86": true,
"cmd/internal/objabi": true,
"cmd/internal/objfile": true,
"cmd/internal/src": true,
"cmd/internal/sys": true,
"cmd/link": true,
"cmd/link/internal/amd64": true,
"cmd/link/internal/arm": true,
"cmd/link/internal/arm64": true,
"cmd/link/internal/ld": true,
"cmd/link/internal/mips": true,
"cmd/link/internal/mips64": true,
"cmd/link/internal/ppc64": true,
"cmd/link/internal/s390x": true,
"cmd/link/internal/x86": true,
"cmd/nm": true,
"cmd/objdump": true,
"cmd/pack": true,
"cmd/pprof": true,
"cmd/trace": true,
"cmd/vet": true,
"cmd/vet/internal/cfg": true,
"cmd/vet/internal/whitelist": true,
"compress/bzip2": true,
"compress/flate": true,
"compress/gzip": true,
"compress/lzw": true,
"compress/zlib": true,
"container/heap": true,
"container/list": true,
"container/ring": true,
"context": true,
"crypto": true,
"crypto/aes": true,
"crypto/cipher": true,
"crypto/des": true,
"crypto/dsa": true,
"crypto/ecdsa": true,
"crypto/elliptic": true,
"crypto/hmac": true,
"crypto/internal/cipherhw": true,
"crypto/md5": true,
"crypto/rand": true,
"crypto/rc4": true,
"crypto/rsa": true,
"crypto/sha1": true,
"crypto/sha256": true,
"crypto/sha512": true,
"crypto/subtle": true,
"crypto/tls": true,
"crypto/x509": true,
"crypto/x509/pkix": true,
"database/sql": true,
"database/sql/driver": true,
"debug/dwarf": true,
"debug/elf": true,
"debug/gosym": true,
"debug/macho": true,
"debug/pe": true,
"debug/plan9obj": true,
"encoding": true,
"encoding/ascii85": true,
"encoding/asn1": true,
"encoding/base32": true,
"encoding/base64": true,
"encoding/binary": true,
"encoding/csv": true,
"encoding/gob": true,
"encoding/hex": true,
"encoding/json": true,
"encoding/pem": true,
"encoding/xml": true,
"errors": true,
"expvar": true,
"flag": true,
"fmt": true,
"go/ast": true,
"go/build": true,
"go/constant": true,
"go/doc": true,
"go/format": true,
"go/importer": true,
"go/internal/gccgoimporter": true,
"go/internal/gcimporter": true,
"go/internal/srcimporter": true,
"go/parser": true,
"go/printer": true,
"go/scanner": true,
"go/token": true,
"go/types": true,
"hash": true,
"hash/adler32": true,
"hash/crc32": true,
"hash/crc64": true,
"hash/fnv": true,
"html": true,
"html/template": true,
"image": true,
"image/color": true,
"image/color/palette": true,
"image/draw": true,
"image/gif": true,
"image/internal/imageutil": true,
"image/jpeg": true,
"image/png": true,
"index/suffixarray": true,
"internal/cpu": true,
"internal/nettrace": true,
"internal/poll": true,
"internal/race": true,
"internal/singleflight": true,
"internal/syscall/unix": true,
"internal/syscall/windows": true,
"internal/syscall/windows/registry": true,
"internal/syscall/windows/sysdll": true,
"internal/testenv": true,
"internal/trace": true,
"io": true,
"io/ioutil": true,
"log": true,
"log/syslog": true,
"math": true,
"math/big": true,
"math/bits": true,
"math/cmplx": true,
"math/rand": true,
"mime": true,
"mime/multipart": true,
"mime/quotedprintable": true,
"net": true,
"net/http": true,
"net/http/cgi": true,
"net/http/cookiejar": true,
"net/http/fcgi": true,
"net/http/httptest": true,
"net/http/httptrace": true,
"net/http/httputil": true,
"net/http/internal": true,
"net/http/pprof": true,
"net/internal/socktest": true,
"net/mail": true,
"net/rpc": true,
"net/rpc/jsonrpc": true,
"net/smtp": true,
"net/textproto": true,
"net/url": true,
"os": true,
"os/exec": true,
"os/signal": true,
"os/user": true,
"path": true,
"path/filepath": true,
"plugin": true,
"reflect": true,
"regexp": true,
"regexp/syntax": true,
"runtime": true,
"runtime/cgo": true,
"runtime/debug": true,
"runtime/internal/atomic": true,
"runtime/internal/sys": true,
"runtime/pprof": true,
"runtime/pprof/internal/profile": true,
"runtime/race": true,
"runtime/trace": true,
"sort": true,
"strconv": true,
"strings": true,
"sync": true,
"sync/atomic": true,
"syscall": true,
"testing": true,
"testing/internal/testdeps": true,
"testing/iotest": true,
"testing/quick": true,
"text/scanner": true,
"text/tabwriter": true,
"text/template": true,
"text/template/parse": true,
"time": true,
"unicode": true,
"unicode/utf16": true,
"unicode/utf8": true,
"unsafe": true,
}

View File

@ -0,0 +1,34 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = [
"construct.go",
"doc.go",
"generator.go",
"sort_labels.go",
],
importpath = "github.com/bazelbuild/bazel-gazelle/internal/rules",
visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
deps = [
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/packages:go_default_library",
"//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
"//vendor/github.com/bazelbuild/buildtools/tables:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,217 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package rules
import (
"fmt"
"log"
"reflect"
"sort"
"github.com/bazelbuild/bazel-gazelle/internal/config"
"github.com/bazelbuild/bazel-gazelle/internal/packages"
bf "github.com/bazelbuild/buildtools/build"
bt "github.com/bazelbuild/buildtools/tables"
)
// KeyValue represents a key-value pair. This gets converted into a
// rule attribute, i.e., a Skylark keyword argument.
type KeyValue struct {
	Key   string      // attribute name
	Value interface{} // attribute value; converted to an expression by newValue
}
// GlobValue represents a Bazel glob expression.
type GlobValue struct {
	Patterns []string // patterns to include
	Excludes []string // patterns to exclude (glob's exclude argument)
}
// EmptyRule generates an empty rule with the given kind and name.
// An empty rule carries only a "name" attribute.
func EmptyRule(kind, name string) *bf.CallExpr {
	return NewRule(kind, []KeyValue{{"name", name}})
}
// NewRule generates a rule of the given kind with the given attributes.
// Attributes are emitted in canonical order (name priority, then
// alphabetical — see byAttrName).
func NewRule(kind string, kwargs []KeyValue) *bf.CallExpr {
	sort.Sort(byAttrName(kwargs))
	var attrs []bf.Expr
	for _, kv := range kwargs {
		attrs = append(attrs, &bf.BinaryExpr{
			X:  &bf.LiteralExpr{Token: kv.Key},
			Op: "=",
			Y:  newValue(kv.Value),
		})
	}
	return &bf.CallExpr{
		X:    &bf.LiteralExpr{Token: kind},
		List: attrs,
	}
}
// newValue converts a Go value into the corresponding expression in a Bazel
// BUILD file. Supported: bool, integer and float kinds, string, slice/array,
// map (rendered as a select() over platform labels), and the GlobValue and
// packages.PlatformStrings structs. Any other type panics.
//
// Fix: the glob exclude list was emitted with the keyword "excludes";
// Bazel's glob() argument is named "exclude", so the generated BUILD file
// would be rejected.
func newValue(val interface{}) bf.Expr {
	rv := reflect.ValueOf(val)
	switch rv.Kind() {
	case reflect.Bool:
		tok := "False"
		if rv.Bool() {
			tok = "True"
		}
		return &bf.LiteralExpr{Token: tok}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return &bf.LiteralExpr{Token: fmt.Sprintf("%d", val)}
	case reflect.Float32, reflect.Float64:
		return &bf.LiteralExpr{Token: fmt.Sprintf("%f", val)}
	case reflect.String:
		return &bf.StringExpr{Value: val.(string)}
	case reflect.Slice, reflect.Array:
		var list []bf.Expr
		for i := 0; i < rv.Len(); i++ {
			elem := newValue(rv.Index(i).Interface())
			list = append(list, elem)
		}
		return &bf.ListExpr{List: list}
	case reflect.Map:
		// Maps become select() expressions keyed by platform labels, with a
		// trailing "//conditions:default" case. Keys are sorted for
		// deterministic output.
		rkeys := rv.MapKeys()
		sort.Sort(byString(rkeys))
		args := make([]bf.Expr, len(rkeys))
		for i, rk := range rkeys {
			label := fmt.Sprintf("@%s//go/platform:%s", config.RulesGoRepoName, mapKeyString(rk))
			k := &bf.StringExpr{Value: label}
			v := newValue(rv.MapIndex(rk).Interface())
			if l, ok := v.(*bf.ListExpr); ok {
				l.ForceMultiLine = true
			}
			args[i] = &bf.KeyValueExpr{Key: k, Value: v}
		}
		args = append(args, &bf.KeyValueExpr{
			Key:   &bf.StringExpr{Value: "//conditions:default"},
			Value: &bf.ListExpr{},
		})
		sel := &bf.CallExpr{
			X:    &bf.LiteralExpr{Token: "select"},
			List: []bf.Expr{&bf.DictExpr{List: args, ForceMultiLine: true}},
		}
		return sel
	case reflect.Struct:
		switch val := val.(type) {
		case GlobValue:
			patternsValue := newValue(val.Patterns)
			globArgs := []bf.Expr{patternsValue}
			if len(val.Excludes) > 0 {
				excludesValue := newValue(val.Excludes)
				globArgs = append(globArgs, &bf.KeyValueExpr{
					// "exclude" is glob()'s keyword; "excludes" (as the
					// original wrote) is not accepted by Bazel.
					Key:   &bf.StringExpr{Value: "exclude"},
					Value: excludesValue,
				})
			}
			return &bf.CallExpr{
				X:    &bf.LiteralExpr{Token: "glob"},
				List: globArgs,
			}
		case packages.PlatformStrings:
			// Concatenate the non-empty groups with "+"; a lone group is
			// returned as-is, and an all-empty value becomes an empty list.
			var pieces []bf.Expr
			if len(val.Generic) > 0 {
				pieces = append(pieces, newValue(val.Generic))
			}
			if len(val.OS) > 0 {
				pieces = append(pieces, newValue(val.OS))
			}
			if len(val.Arch) > 0 {
				pieces = append(pieces, newValue(val.Arch))
			}
			if len(val.Platform) > 0 {
				pieces = append(pieces, newValue(val.Platform))
			}
			if len(pieces) == 0 {
				return &bf.ListExpr{}
			} else if len(pieces) == 1 {
				return pieces[0]
			} else {
				e := pieces[0]
				if list, ok := e.(*bf.ListExpr); ok {
					list.ForceMultiLine = true
				}
				for _, piece := range pieces[1:] {
					e = &bf.BinaryExpr{X: e, Y: piece, Op: "+"}
				}
				return e
			}
		}
	}
	log.Panicf("type not supported: %T", val)
	return nil
}
// mapKeyString converts a map key (used when building select expressions)
// to a string. Only string and config.Platform keys are supported; anything
// else is a programmer error and panics.
func mapKeyString(k reflect.Value) string {
	switch s := k.Interface().(type) {
	case string:
		return s
	case config.Platform:
		return s.String()
	default:
		log.Panicf("unexpected map key: %v", k)
		return ""
	}
}
// byAttrName sorts rule attributes into canonical order: by buildtools'
// name-priority table first, then alphabetically by key.
type byAttrName []KeyValue

// Compile-time check that byAttrName implements sort.Interface.
var _ sort.Interface = byAttrName{}

func (s byAttrName) Len() int {
	return len(s)
}

func (s byAttrName) Less(i, j int) bool {
	// bt.NamePriority ranks well-known attribute names; names missing from
	// the table get the zero rank, so ties fall back to lexical order.
	if cmp := bt.NamePriority[s[i].Key] - bt.NamePriority[s[j].Key]; cmp != 0 {
		return cmp < 0
	}
	return s[i].Key < s[j].Key
}

func (s byAttrName) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}
// byString sorts reflected map keys by their string form (see mapKeyString)
// so generated select expressions are deterministic.
type byString []reflect.Value

// Compile-time check that byString implements sort.Interface.
var _ sort.Interface = byString{}

func (s byString) Len() int {
	return len(s)
}

func (s byString) Less(i, j int) bool {
	return mapKeyString(s[i]) < mapKeyString(s[j])
}

func (s byString) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

View File

@ -0,0 +1,17 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package rules provides Bazel rule generation for Go build targets.
package rules

View File

@ -0,0 +1,326 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package rules
import (
"fmt"
"log"
"path"
"strings"
"github.com/bazelbuild/bazel-gazelle/internal/config"
"github.com/bazelbuild/bazel-gazelle/internal/label"
"github.com/bazelbuild/bazel-gazelle/internal/packages"
bf "github.com/bazelbuild/buildtools/build"
)
// NewGenerator returns a new instance of Generator.
// "oldFile" is the existing build file. May be nil.
func NewGenerator(c *config.Config, l *label.Labeler, oldFile *bf.File) *Generator {
	// Only set visibility on generated rules when the existing file does
	// not already declare a default visibility.
	shouldSetVisibility := oldFile == nil || !hasDefaultVisibility(oldFile)
	return &Generator{c: c, l: l, shouldSetVisibility: shouldSetVisibility}
}
// Generator generates Bazel build rules for Go build targets.
type Generator struct {
	c *config.Config // generation configuration (e.g. ProtoMode)
	l *label.Labeler // produces labels for generated targets
	// shouldSetVisibility controls whether generated rules get an explicit
	// visibility attribute (set unless the file has a default visibility).
	shouldSetVisibility bool
}
// GenerateRules generates a list of rules for targets in "pkg". It also returns
// a list of empty rules that may be deleted from an existing file.
func (g *Generator) GenerateRules(pkg *packages.Package) (rules []bf.Expr, empty []bf.Expr, err error) {
	protoLibName, protoRules := g.generateProto(pkg)
	libName, libRule := g.generateLib(pkg, protoLibName)

	// Candidate order: proto rules, library, binary, internal test,
	// external test.
	candidates := append(protoRules,
		libRule,
		g.generateBin(pkg, libName),
		g.generateTest(pkg, libName, false),
		g.generateTest(pkg, "", true))

	// Empty rules are reported separately so callers can delete them from
	// an existing file.
	for _, c := range candidates {
		if isEmpty(c) {
			empty = append(empty, c)
		} else {
			rules = append(rules, c)
		}
	}
	return rules, empty, nil
}
// generateProto generates proto_library and go_proto_library rules for the
// package (or a legacy filegroup of .proto sources). It returns the name of
// the go_proto_library rule the go_library should embed ("" if none) along
// with the generated rules.
func (g *Generator) generateProto(pkg *packages.Package) (string, []bf.Expr) {
	if g.c.ProtoMode == config.DisableProtoMode {
		// Don't create or delete proto rules in this mode. Any existing rules
		// are likely hand-written.
		return "", nil
	}

	filegroupName := config.DefaultProtosName
	protoName := g.l.ProtoLabel(pkg.Rel, pkg.Name).Name
	goProtoName := g.l.GoProtoLabel(pkg.Rel, pkg.Name).Name

	if g.c.ProtoMode == config.LegacyProtoMode {
		// Legacy mode emits only a filegroup of the proto sources.
		if !pkg.Proto.HasProto() {
			return "", []bf.Expr{EmptyRule("filegroup", filegroupName)}
		}
		attrs := []KeyValue{
			{Key: "name", Value: filegroupName},
			{Key: "srcs", Value: pkg.Proto.Sources},
		}
		if g.shouldSetVisibility {
			attrs = append(attrs, KeyValue{"visibility", []string{checkInternalVisibility(pkg.Rel, "//visibility:public")}})
		}
		return "", []bf.Expr{NewRule("filegroup", attrs)}
	}

	if !pkg.Proto.HasProto() {
		// No proto sources: emit deletable empty rules for all three kinds.
		return "", []bf.Expr{
			EmptyRule("filegroup", filegroupName),
			EmptyRule("proto_library", protoName),
			EmptyRule("go_proto_library", goProtoName),
		}
	}

	var rules []bf.Expr
	visibility := []string{checkInternalVisibility(pkg.Rel, "//visibility:public")}
	protoAttrs := []KeyValue{
		{"name", protoName},
		{"srcs", pkg.Proto.Sources},
	}
	if g.shouldSetVisibility {
		protoAttrs = append(protoAttrs, KeyValue{"visibility", visibility})
	}
	imports := pkg.Proto.Imports
	if !imports.IsEmpty() {
		// NOTE(review): GazelleImportsKey records unresolved imports —
		// presumably rewritten into deps by the resolver; confirm there.
		protoAttrs = append(protoAttrs, KeyValue{config.GazelleImportsKey, imports})
	}
	rules = append(rules, NewRule("proto_library", protoAttrs))

	goProtoAttrs := []KeyValue{
		{"name", goProtoName},
		{"proto", ":" + protoName},
		{"importpath", pkg.ImportPath},
	}
	if pkg.Proto.HasServices {
		// Services require the gRPC compiler.
		goProtoAttrs = append(goProtoAttrs, KeyValue{"compilers", []string{"@io_bazel_rules_go//proto:go_grpc"}})
	}
	if g.shouldSetVisibility {
		goProtoAttrs = append(goProtoAttrs, KeyValue{"visibility", visibility})
	}
	if !imports.IsEmpty() {
		goProtoAttrs = append(goProtoAttrs, KeyValue{config.GazelleImportsKey, imports})
	}
	rules = append(rules, NewRule("go_proto_library", goProtoAttrs))

	return goProtoName, rules
}
// generateBin generates a go_binary rule for a command package. "library" is
// the name of the go_library rule to embed ("" if none).
func (g *Generator) generateBin(pkg *packages.Package, library string) bf.Expr {
	name := g.l.BinaryLabel(pkg.Rel).Name
	// Note precedence: && binds tighter than ||. Emit an empty rule when the
	// package is not a command, or when it has neither binary sources nor a
	// library to embed.
	if !pkg.IsCommand() || pkg.Binary.Sources.IsEmpty() && library == "" {
		return EmptyRule("go_binary", name)
	}
	visibility := checkInternalVisibility(pkg.Rel, "//visibility:public")
	attrs := g.commonAttrs(pkg.Rel, name, visibility, pkg.Binary)
	if library != "" {
		attrs = append(attrs, KeyValue{"embed", []string{":" + library}})
	}
	return NewRule("go_binary", attrs)
}
// generateLib generates a go_library rule for the package. "goProtoName" is
// the name of the go_proto_library rule to embed ("" if none). It returns
// the library name ("" when only an empty rule is produced) and the rule.
func (g *Generator) generateLib(pkg *packages.Package, goProtoName string) (string, *bf.CallExpr) {
	name := g.l.LibraryLabel(pkg.Rel).Name
	if !pkg.Library.HasGo() && goProtoName == "" {
		return "", EmptyRule("go_library", name)
	}
	var visibility string
	if pkg.IsCommand() {
		// Libraries made for a go_binary should not be exposed to the public.
		visibility = "//visibility:private"
	} else {
		visibility = checkInternalVisibility(pkg.Rel, "//visibility:public")
	}
	attrs := g.commonAttrs(pkg.Rel, name, visibility, pkg.Library)
	attrs = append(attrs, KeyValue{"importpath", pkg.ImportPath})
	if goProtoName != "" {
		attrs = append(attrs, KeyValue{"embed", []string{":" + goProtoName}})
	}
	rule := NewRule("go_library", attrs)
	return name, rule
}
// hasDefaultVisibility returns whether oldFile contains a "package" rule with
// a "default_visibility" attribute. Rules generated by Gazelle should not
// have their own visibility attributes if this is the case.
func hasDefaultVisibility(oldFile *bf.File) bool {
	for _, stmt := range oldFile.Stmt {
		call, ok := stmt.(*bf.CallExpr)
		if !ok {
			continue
		}
		rule := bf.Rule{Call: call}
		if rule.Kind() != "package" {
			continue
		}
		if rule.Attr("default_visibility") != nil {
			return true
		}
	}
	return false
}
// checkInternalVisibility overrides the given visibility if the package is
// internal: packages under an "internal" directory are restricted to
// subpackages of the directory that contains "internal".
func checkInternalVisibility(rel, visibility string) string {
	// Only the last "internal" element in the path matters.
	if i := strings.LastIndex(rel, "/internal/"); i >= 0 {
		return fmt.Sprintf("//%s:__subpackages__", rel[:i])
	}
	if strings.HasPrefix(rel, "internal/") {
		// "internal" at the repository root restricts to the whole repo.
		return "//:__subpackages__"
	}
	return visibility
}
// generateTest generates a go_test rule for the package's internal test
// target, or for the external (x_test) target when isXTest is true.
// "library" is the go_library rule to embed ("" for no embedding).
func (g *Generator) generateTest(pkg *packages.Package, library string, isXTest bool) bf.Expr {
	name := g.l.TestLabel(pkg.Rel, isXTest).Name
	target := pkg.Test
	if isXTest {
		target = pkg.XTest
	}
	if !target.HasGo() {
		return EmptyRule("go_test", name)
	}
	attrs := g.commonAttrs(pkg.Rel, name, "", target)
	if library != "" {
		attrs = append(attrs, KeyValue{"embed", []string{":" + library}})
	}
	if pkg.HasTestdata {
		// Make everything under testdata/ available to the test at run time.
		glob := GlobValue{Patterns: []string{"testdata/**"}}
		attrs = append(attrs, KeyValue{"data", glob})
	}
	return NewRule("go_test", attrs)
}
// commonAttrs returns the attributes shared by all generated Go rules:
// name, srcs, cgo settings, visibility, and recorded imports. Visibility is
// omitted when empty or when the build file has a default_visibility.
func (g *Generator) commonAttrs(pkgRel, name, visibility string, target packages.GoTarget) []KeyValue {
	attrs := []KeyValue{{"name", name}}
	if !target.Sources.IsEmpty() {
		attrs = append(attrs, KeyValue{"srcs", target.Sources})
	}
	if target.Cgo {
		attrs = append(attrs, KeyValue{"cgo", true})
	}
	if !target.CLinkOpts.IsEmpty() {
		// Rewrite package-relative paths in cgo options to repo-root-relative.
		attrs = append(attrs, KeyValue{"clinkopts", g.options(target.CLinkOpts, pkgRel)})
	}
	if !target.COpts.IsEmpty() {
		attrs = append(attrs, KeyValue{"copts", g.options(target.COpts, pkgRel)})
	}
	if g.shouldSetVisibility && visibility != "" {
		attrs = append(attrs, KeyValue{"visibility", []string{visibility}})
	}
	imports := target.Imports
	if !imports.IsEmpty() {
		attrs = append(attrs, KeyValue{config.GazelleImportsKey, imports})
	}
	return attrs
}
var (
	// shortOptPrefixes are strings that come at the beginning of an option
	// argument that includes a path, e.g., -Ifoo/bar.
	shortOptPrefixes = []string{"-I", "-L", "-F"}

	// longOptPrefixes are separate arguments that come before a path argument,
	// e.g., -iquote foo/bar. Note this deliberately repeats the short
	// prefixes: "-I foo" (separate argument) is also accepted.
	longOptPrefixes = []string{"-I", "-L", "-F", "-iquote", "-isystem"}
)
// options transforms package-relative paths in cgo options into repository-
// root-relative paths that Bazel can understand. For example, if a cgo file
// in //foo declares an include flag in its copts: "-Ibar", this method
// will transform that flag into "-Ifoo/bar".
func (g *Generator) options(opts packages.PlatformStrings, pkgRel string) packages.PlatformStrings {
	// fixPath joins a relative path onto the package path; absolute paths
	// are returned unchanged.
	fixPath := func(opt string) string {
		if strings.HasPrefix(opt, "/") {
			return opt
		}
		return path.Clean(path.Join(pkgRel, opt))
	}

	// fixGroups rewrites each group of options (options within a group are
	// separated by packages.OptSeparator). Three cases are handled via goto:
	// a path argument following a bare long-form flag, a short-form flag
	// with an attached path (e.g. "-Ifoo"), and a bare long-form flag that
	// marks the NEXT option as a path. Every option is escaped afterwards.
	fixGroups := func(groups []string) ([]string, error) {
		fixedGroups := make([]string, len(groups))
		for i, group := range groups {
			opts := strings.Split(group, packages.OptSeparator)
			fixedOpts := make([]string, len(opts))
			isPath := false
			for j, opt := range opts {
				if isPath {
					// Previous option was a bare -iquote/-isystem/etc.
					opt = fixPath(opt)
					isPath = false
					goto next
				}

				for _, short := range shortOptPrefixes {
					if strings.HasPrefix(opt, short) && len(opt) > len(short) {
						opt = short + fixPath(opt[len(short):])
						goto next
					}
				}

				for _, long := range longOptPrefixes {
					if opt == long {
						isPath = true
						goto next
					}
				}

			next:
				fixedOpts[j] = escapeOption(opt)
			}
			fixedGroups[i] = strings.Join(fixedOpts, " ")
		}

		return fixedGroups, nil
	}

	opts, errs := opts.MapSlice(fixGroups)
	if errs != nil {
		// fixGroups never returns an error, so any error here is a bug.
		log.Panicf("unexpected error when transforming options with pkg %q: %v", pkgRel, errs)
	}
	return opts
}
// escapeOption backslash-escapes characters in a cgo option that would
// otherwise be misinterpreted when options are joined with spaces:
// backslash, both quote characters, and whitespace.
func escapeOption(opt string) string {
	escaped := make([]byte, 0, len(opt))
	for i := 0; i < len(opt); i++ {
		switch c := opt[i]; c {
		case '\\', '\'', '"', ' ', '\t', '\n', '\r':
			escaped = append(escaped, '\\', c)
		default:
			escaped = append(escaped, c)
		}
	}
	return string(escaped)
}
// isEmpty reports whether r is a rule call whose only attribute is its name,
// meaning it may be deleted from an existing build file.
func isEmpty(r bf.Expr) bool {
	if c, ok := r.(*bf.CallExpr); ok {
		return len(c.List) == 1 // only the "name" attribute remains
	}
	return false
}

View File

@ -0,0 +1,145 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package rules
import (
"sort"
"strings"
bf "github.com/bazelbuild/buildtools/build"
)
var (
	// goRuleKinds is the set of rule kinds whose label lists SortLabels sorts.
	goRuleKinds = map[string]bool{
		"cgo_library": true,
		"go_binary":   true,
		"go_library":  true,
		"go_test":     true,
	}
	// sortedAttrs lists the attributes whose string lists should be sorted.
	// NOTE(review): SortLabels below uses an inline copy of this list rather
	// than this variable — keep the two in sync.
	sortedAttrs = []string{"srcs", "deps"}
)
// SortLabels sorts lists of strings in "srcs" and "deps" attributes of
// Go rules using the same order as buildifier. Buildifier also sorts string
// lists, but not those involved with "select" expressions.
// TODO(jayconrod): remove this when bazelbuild/buildtools#122 is fixed.
func SortLabels(f *bf.File) {
	for _, s := range f.Stmt {
		c, ok := s.(*bf.CallExpr)
		if !ok {
			continue
		}
		r := bf.Rule{Call: c}
		if !goRuleKinds[r.Kind()] {
			continue
		}
		// Use the package-level sortedAttrs list rather than duplicating the
		// attribute names inline, so there is a single source of truth.
		for _, key := range sortedAttrs {
			attr := r.AttrDefn(key)
			if attr == nil {
				continue
			}
			bf.Walk(attr.Y, sortExprLabels)
		}
	}
}
// sortExprLabels sorts the elements of a list expression in place. Lists
// containing any non-string element are left untouched. Comments attached
// before the first element are detached during the sort and re-attached to
// whichever element ends up first, so they stay at the top of the list.
func sortExprLabels(e bf.Expr, _ []bf.Expr) {
	list, ok := e.(*bf.ListExpr)
	if !ok || len(list.List) == 0 {
		return
	}

	keys := make([]stringSortKey, len(list.List))
	for i, elem := range list.List {
		s, ok := elem.(*bf.StringExpr)
		if !ok {
			return // don't sort lists unless all elements are strings
		}
		keys[i] = makeSortKey(i, s)
	}

	before := keys[0].x.Comment().Before
	keys[0].x.Comment().Before = nil

	sort.Sort(byStringExpr(keys))

	// Re-attach the list-leading comments ahead of the new first element's own.
	keys[0].x.Comment().Before = append(before, keys[0].x.Comment().Before...)
	for i, k := range keys {
		list.List[i] = k.x
	}
}
// Code below this point is adapted from
// github.com/bazelbuild/buildtools/build/rewrite.go

// A stringSortKey records information about a single string literal to be
// sorted. The strings are first grouped into four phases: most strings,
// strings beginning with ":", strings beginning with "//", and strings
// beginning with "@". The next significant part of the comparison is the list
// of elements in the value, where elements are split at `.' and `:'. Finally
// we compare by value and break ties by original index.
type stringSortKey struct {
	phase    int      // 0 plain, 1 ":", 2 "//", 3 "@" (compared first)
	split    []string // value split at "." and ":" for element-wise compare
	value    string   // the original string value
	original int      // original list index, final tie-breaker
	x        bf.Expr  // the underlying expression placed back after sorting
}
// makeSortKey builds the sort key for the string literal x found at position
// index in its list: the phase is chosen from the label's leading syntax, and
// the value is split at "." and ":" for element-wise comparison.
func makeSortKey(index int, x *bf.StringExpr) stringSortKey {
	key := stringSortKey{
		value:    x.Value,
		original: index,
		x:        x,
	}
	if strings.HasPrefix(x.Value, ":") {
		key.phase = 1
	} else if strings.HasPrefix(x.Value, "//") {
		key.phase = 2
	} else if strings.HasPrefix(x.Value, "@") {
		key.phase = 3
	}
	dotted := strings.Replace(x.Value, ":", ".", -1)
	key.split = strings.Split(dotted, ".")
	return key
}
// byStringExpr implements sort.Interface for a list of stringSortKey.
type byStringExpr []stringSortKey

func (x byStringExpr) Len() int      { return len(x) }
func (x byStringExpr) Swap(i, j int) { x[i], x[j] = x[j], x[i] }

// Less orders keys by phase, then element-wise over the split path, then by
// element count, then by raw value, breaking final ties with the original
// index so the sort is stable with respect to equal values.
func (x byStringExpr) Less(i, j int) bool {
	a, b := x[i], x[j]
	if a.phase != b.phase {
		return a.phase < b.phase
	}
	n := len(a.split)
	if len(b.split) < n {
		n = len(b.split)
	}
	for k := 0; k < n; k++ {
		if a.split[k] != b.split[k] {
			return a.split[k] < b.split[k]
		}
	}
	if len(a.split) != len(b.split) {
		return len(a.split) < len(b.split)
	}
	if a.value != b.value {
		return a.value < b.value
	}
	return a.original < b.original
}

View File

@ -0,0 +1,22 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

# Library target for the vendored gazelle "version" package (see importpath).
go_library(
    name = "go_default_library",
    srcs = ["version.go"],
    importpath = "github.com/bazelbuild/bazel-gazelle/internal/version",
    visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
)

# Boilerplate source filegroups, tagged "automanaged".
# NOTE(review): presumably maintained by tooling — do not edit by hand.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,72 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package version
import (
"fmt"
"strconv"
"strings"
)
// Version is a tuple of non-negative integers that represents the version of
// a software package.
type Version []int

// String formats the version as dot-separated components, e.g. "1.2.3".
func (v Version) String() string {
	cstrs := make([]string, len(v))
	for i, cn := range v {
		cstrs[i] = strconv.Itoa(cn)
	}
	return strings.Join(cstrs, ".")
}

// Compare returns an integer comparing two versions lexicographically:
// negative if x < y, zero if x == y, and positive if x > y.
func (x Version) Compare(y Version) int {
	n := len(x)
	if len(y) < n {
		n = len(y)
	}
	for i := 0; i < n; i++ {
		// Compare with explicit branches rather than subtraction so the
		// result cannot overflow for extreme component values.
		switch {
		case x[i] < y[i]:
			return -1
		case x[i] > y[i]:
			return 1
		}
	}
	// All shared components are equal; the shorter version sorts first.
	switch {
	case len(x) < len(y):
		return -1
	case len(x) > len(y):
		return 1
	}
	return 0
}

// ParseVersion parses a version of the form "12.34.56-abcd". Non-negative
// integer components are separated by dots. An arbitrary suffix may appear
// after '-', which is ignored.
func ParseVersion(vs string) (Version, error) {
	// Strip the optional "-suffix".
	i := strings.IndexByte(vs, '-')
	if i >= 0 {
		vs = vs[:i]
	}
	cstrs := strings.Split(vs, ".")
	v := make(Version, len(cstrs))
	for i, cstr := range cstrs {
		cn, err := strconv.Atoi(cstr)
		if err != nil {
			return nil, fmt.Errorf("could not parse version string: %q is not an integer", cstr)
		}
		if cn < 0 {
			return nil, fmt.Errorf("could not parse version string: %q is negative", cstr)
		}
		v[i] = cn
	}
	return v, nil
}

View File

@ -0,0 +1,22 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

# Library target for the vendored gazelle "wspace" package (WORKSPACE finder).
go_library(
    name = "go_default_library",
    srcs = ["finder.go"],
    importpath = "github.com/bazelbuild/bazel-gazelle/internal/wspace",
    visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
)

# Boilerplate source filegroups, tagged "automanaged".
# NOTE(review): presumably maintained by tooling — do not edit by hand.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,45 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package wspace provides functions to locate and modify a bazel WORKSPACE file.
package wspace
import (
"os"
"path/filepath"
"strings"
)
const workspaceFile = "WORKSPACE"
// Find searches from the given dir and up for the WORKSPACE file
// returning the directory containing it, or an error if none found in the tree.
func Find(dir string) (string, error) {
dir, err := filepath.Abs(dir)
if err != nil {
return "", err
}
for {
_, err = os.Stat(filepath.Join(dir, workspaceFile))
if err == nil {
return dir, nil
}
if !os.IsNotExist(err) {
return "", err
}
if strings.HasSuffix(dir, string(os.PathSeparator)) { // stop at root dir
return "", os.ErrNotExist
}
dir = filepath.Dir(dir)
}
}

15
vendor/github.com/bazelbuild/buildtools/CONTRIBUTORS generated vendored Normal file
View File

@ -0,0 +1,15 @@
# People who have agreed to one of the CLAs and can contribute patches.
# The AUTHORS file lists the copyright holders; this file
# lists people. For example, Google employees are listed here
# but not in AUTHORS, because Google holds the copyright.
#
# https://developers.google.com/open-source/cla/individual
# https://developers.google.com/open-source/cla/corporate
#
# Names should be added to this file as:
# Name <email address>
Paul Bethe <pbethe@google.com>
Russ Cox <rsc@google.com>
Laurent Le Brun <laurentlb@google.com>
Justine Alexandra Roberts Tunney <jart@google.com>
Nilton Volpato <nilton@google.com>

13
vendor/github.com/bazelbuild/buildtools/LICENSE generated vendored Normal file
View File

@ -0,0 +1,13 @@
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

32
vendor/github.com/bazelbuild/buildtools/build/BUILD generated vendored Normal file
View File

@ -0,0 +1,32 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

# Library target for the vendored buildtools BUILD-file parser package.
go_library(
    name = "go_default_library",
    srcs = [
        "lex.go",
        "parse.y.go",
        "print.go",
        "quote.go",
        "rewrite.go",
        "rule.go",
        "syntax.go",
        "walk.go",
    ],
    importpath = "github.com/bazelbuild/buildtools/build",
    visibility = ["//visibility:public"],
    deps = ["//vendor/github.com/bazelbuild/buildtools/tables:go_default_library"],
)

# Boilerplate source filegroups, tagged "automanaged".
# NOTE(review): presumably maintained by tooling — do not edit by hand.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,52 @@
"""Provides go_yacc and genfile_check_test
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Label of the goyacc tool used to regenerate the parser.
_GO_YACC_TOOL = "@org_golang_x_tools//cmd/goyacc"

def go_yacc(src, out, visibility=None):
    """Runs go tool yacc -o $out $src."""
    native.genrule(
        name = src + ".go_yacc",
        srcs = [src],
        outs = [out],
        tools = [_GO_YACC_TOOL],
        # GOROOT is derived from the tool's own location.
        # NOTE(review): presumably goyacc needs a GOROOT at run time, which is
        # also why the rule runs with local = 1 — confirm before changing.
        cmd = ("export GOROOT=$$(dirname $(location " + _GO_YACC_TOOL + "))/..;" +
               " $(location " + _GO_YACC_TOOL + ") " +
               " -o $(location " + out + ") $(SRCS)"),
        visibility = visibility,
        local = 1,
    )
def genfile_check_test(src, gen):
    """Asserts that any checked-in generated code matches regen.

    Args:
      src: label of the checked-in generated file.
      gen: label of the freshly regenerated file to compare against.
    """
    if not src:
        fail("src is required", "src")
    if not gen:
        fail("gen is required", "gen")
    # The generated shell script diffs its two arguments; a non-empty diff
    # (checked-in file differs from regenerated output) fails the test.
    native.genrule(
        name = src + "_checksh",
        outs = [src + "_check.sh"],
        cmd = "echo 'diff $$@' > $@",
    )
    native.sh_test(
        name = src + "_checkshtest",
        size = "small",
        srcs = [src + "_check.sh"],
        data = [src, gen],
        args = ["$(location " + src + ")", "$(location " + gen + ")"],
    )

772
vendor/github.com/bazelbuild/buildtools/build/lex.go generated vendored Normal file
View File

@ -0,0 +1,772 @@
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Lexical scanning for BUILD file parser.
package build
import (
"bytes"
"fmt"
"strings"
"unicode/utf8"
)
// Parse parses the input data and returns the corresponding parse tree.
//
// The filename is used only for generating error messages; errors are
// reported with "filename:line:column" positions.
func Parse(filename string, data []byte) (*File, error) {
	in := newInput(filename, data)
	return in.parse()
}
// An input represents a single input file being parsed.
type input struct {
	// Lexing state.
	filename  string    // name of input file, for errors
	complete  []byte    // entire input
	remaining []byte    // remaining input
	token     []byte    // token being scanned
	lastToken string    // most recently returned token, for error messages
	pos       Position  // current input position
	comments  []Comment // accumulated comments
	// endRule is the number of input bytes remaining at the end of the
	// current top-level rule; 0 when not inside a rule, -1 when implicit
	// semicolon insertion is disabled (see Lex).
	endRule int // position of end of current rule
	depth   int // nesting of [ ] { } ( )

	// Parser state.
	file       *File // returned top-level syntax tree
	parseError error // error encountered during parsing

	// Comment assignment state.
	pre  []Expr // all expressions, in preorder traversal
	post []Expr // all expressions, in postorder traversal
}
// newInput returns a fresh lexer/parser state over data. Line and column
// (LineRune) positions are 1-based; byte offsets are 0-based.
func newInput(filename string, data []byte) *input {
	return &input{
		filename:  filename,
		complete:  data,
		remaining: data,
		pos:       Position{Line: 1, LineRune: 1, Byte: 0},
	}
}
// parse parses the input file. Results are named so the deferred recover
// below can overwrite the returned error.
func (in *input) parse() (f *File, err error) {
	// The parser panics for both routine errors like syntax errors
	// and for programmer bugs like array index errors.
	// Turn both into error returns. Catching bug panics is
	// especially important when processing many files.
	defer func() {
		if e := recover(); e != nil {
			if e == in.parseError {
				// A panic raised by in.Error: report it as-is.
				err = in.parseError
			} else {
				err = fmt.Errorf("%s:%d:%d: internal error: %v", in.filename, in.pos.Line, in.pos.LineRune, e)
			}
		}
	}()

	// Invoke the parser generated from parse.y.
	yyParse(in)
	if in.parseError != nil {
		return nil, in.parseError
	}
	in.file.Path = in.filename

	// Assign comments to nearby syntax.
	in.assignComments()

	return in.file, nil
}
// Error is called to report an error.
// When called by the generated code s is always "syntax error".
// Error does not return: it panics. The error is recorded in in.parseError
// first so the recover in parse can distinguish it from a bug panic.
func (in *input) Error(s string) {
	if s == "syntax error" && in.lastToken != "" {
		// Include the offending token for a more useful message.
		s += " near " + in.lastToken
	}
	in.parseError = fmt.Errorf("%s:%d:%d: %v", in.filename, in.pos.Line, in.pos.LineRune, s)
	panic(in.parseError)
}
// eof reports whether the input has reached end of file
// (every byte has been consumed).
func (in *input) eof() bool {
	return len(in.remaining) == 0
}
// peekRune returns the next rune in the input without consuming it.
// It returns 0 at end of input.
func (in *input) peekRune() int {
	if len(in.remaining) == 0 {
		return 0
	}
	r, _ := utf8.DecodeRune(in.remaining)
	return int(r)
}
// readRune consumes and returns the next rune in the input, keeping the
// line/column/byte position in sync. Calling readRune at EOF is a lexer
// bug and panics via in.Error.
func (in *input) readRune() int {
	if len(in.remaining) == 0 {
		in.Error("internal lexer error: readRune at EOF")
	}
	r, size := utf8.DecodeRune(in.remaining)
	in.remaining = in.remaining[size:]
	if r == '\n' {
		in.pos.Line++
		in.pos.LineRune = 1
	} else {
		in.pos.LineRune++
	}
	in.pos.Byte += size
	return int(r)
}
// startToken marks the beginning of the next input token.
// It must be followed by a call to endToken, once the token has
// been consumed using readRune. The token position is captured here.
func (in *input) startToken(val *yySymType) {
	in.token = in.remaining
	val.tok = ""
	val.pos = in.pos
}
// yySymType (used in the next few functions) is defined by the
// generated parser. It is a struct containing all the fields listed
// in parse.y's %union [sic] section.

// endToken marks the end of an input token.
// It records the actual token string in val.tok if the caller
// has not done that already. The token text is the span consumed
// since the matching startToken call.
func (in *input) endToken(val *yySymType) {
	if val.tok == "" {
		tok := string(in.token[:len(in.token)-len(in.remaining)])
		val.tok = tok
		in.lastToken = val.tok
	}
}
// Lex is called from the generated parser to obtain the next input token.
// It returns the token value (either a rune like '+' or a symbolic token _FOR)
// and sets val to the data associated with the token.
//
// For all our input tokens, the associated data is
// val.Pos (the position where the token begins)
// and val.Token (the input string corresponding to the token).
func (in *input) Lex(val *yySymType) int {
	// Skip past spaces, stopping at non-space or EOF.
	countNL := 0 // number of newlines we've skipped past
	for !in.eof() {
		// The parser does not track indentation, because for the most part
		// BUILD expressions don't care about how they are indented.
		// However, we do need to be able to distinguish
		//
		// x = y[0]
		//
		// from the occasional
		//
		// x = y
		// [0]
		//
		// To handle this one case, when we reach the beginning of a
		// top-level BUILD expression, we scan forward to see where
		// it should end and record the number of input bytes remaining
		// at that endpoint. When we reach that point in the input, we
		// insert an implicit semicolon to force the two expressions
		// to stay separate.
		//
		if in.endRule != 0 && len(in.remaining) == in.endRule {
			in.endRule = 0
			in.lastToken = "implicit ;"
			val.tok = ";"
			return ';'
		}

		// Skip over spaces. Count newlines so we can give the parser
		// information about where top-level blank lines are,
		// for top-level comment assignment.
		c := in.peekRune()
		if c == ' ' || c == '\t' || c == '\r' || c == '\n' {
			if c == '\n' && in.endRule == 0 {
				// Not in a rule. Tell parser about top-level blank line.
				in.startToken(val)
				in.readRune()
				in.endToken(val)
				return '\n'
			}
			if c == '\n' {
				countNL++
			}
			in.readRune()
			continue
		}

		// Comment runs to end of line.
		if c == '#' {
			// Is this comment the only thing on its line?
			// Find the last \n before this # and see if it's all
			// spaces from there to here.
			// If it's a suffix comment but the last non-space symbol before
			// it is one of (, [, or {, treat it as a line comment that should be
			// put inside the corresponding block.
			i := bytes.LastIndex(in.complete[:in.pos.Byte], []byte("\n"))
			prefix := bytes.TrimSpace(in.complete[i+1 : in.pos.Byte])
			isSuffix := true
			if len(prefix) == 0 ||
				prefix[len(prefix)-1] == '[' ||
				prefix[len(prefix)-1] == '(' ||
				prefix[len(prefix)-1] == '{' {
				isSuffix = false
			}

			// Consume comment.
			in.startToken(val)
			for len(in.remaining) > 0 && in.readRune() != '\n' {
			}
			in.endToken(val)
			val.tok = strings.TrimRight(val.tok, "\n")
			in.lastToken = "comment"

			// If we are at top level (not in a rule), hand the comment to
			// the parser as a _COMMENT token. The grammar is written
			// to handle top-level comments itself.
			if in.endRule == 0 {
				// Not in a rule. Tell parser about top-level comment.
				return _COMMENT
			}

			// Otherwise, save comment for later attachment to syntax tree.
			if countNL > 1 {
				// NOTE(review): an empty Comment apparently records the blank
				// line(s) preceding this comment — confirm in assignComments.
				in.comments = append(in.comments, Comment{val.pos, "", false})
			}
			in.comments = append(in.comments, Comment{val.pos, val.tok, isSuffix})
			countNL = 1
			continue
		}

		if c == '\\' && len(in.remaining) >= 2 && in.remaining[1] == '\n' {
			// We can ignore a trailing \ at end of line.
			in.readRune()
			continue
		}

		// Found non-space non-comment.
		break
	}

	// Found the beginning of the next token.
	in.startToken(val)
	defer in.endToken(val)

	// End of file.
	if in.eof() {
		in.lastToken = "EOF"
		return _EOF
	}

	// If endRule is 0, we need to recompute where the end
	// of the next rule (Python expression) is, so that we can
	// generate a virtual end-of-rule semicolon (see above).
	if in.endRule == 0 {
		in.endRule = len(in.skipPython(in.remaining))
		if in.endRule == 0 {
			// skipPython got confused.
			// No more virtual semicolons.
			in.endRule = -1
		}
	}

	// Punctuation tokens.
	switch c := in.peekRune(); c {
	case '[', '(', '{':
		in.depth++
		in.readRune()
		return c

	case ']', ')', '}':
		in.depth--
		in.readRune()
		return c

	case '.', '-', '%', ':', ';', ',', '/', '*': // single-char tokens
		in.readRune()
		return c

	case '<', '>', '=', '!', '+': // possibly followed by =
		in.readRune()
		if in.peekRune() == '=' {
			in.readRune()
			switch c {
			case '<':
				return _LE
			case '>':
				return _GE
			case '=':
				return _EQ
			case '!':
				return _NE
			case '+':
				return _ADDEQ
			}
		}
		return c

	case 'r': // possible beginning of raw quoted string
		if len(in.remaining) < 2 || in.remaining[1] != '"' && in.remaining[1] != '\'' {
			// Not a raw string: fall through to identifier scanning below.
			break
		}
		in.readRune()
		c = in.peekRune()
		fallthrough

	case '"', '\'': // quoted string
		quote := c
		if len(in.remaining) >= 3 && in.remaining[0] == byte(quote) && in.remaining[1] == byte(quote) && in.remaining[2] == byte(quote) {
			// Triple-quoted string.
			in.readRune()
			in.readRune()
			in.readRune()
			var c1, c2, c3 int
			for {
				if in.eof() {
					in.pos = val.pos
					in.Error("unexpected EOF in string")
				}
				// Track the last three runes to detect the closing quotes.
				c1, c2, c3 = c2, c3, in.readRune()
				if c1 == quote && c2 == quote && c3 == quote {
					break
				}
				if c3 == '\\' {
					if in.eof() {
						in.pos = val.pos
						in.Error("unexpected EOF in string")
					}
					in.readRune()
				}
			}
		} else {
			in.readRune()
			for {
				if in.eof() {
					in.pos = val.pos
					in.Error("unexpected EOF in string")
				}
				if in.peekRune() == '\n' {
					in.Error("unexpected newline in string")
				}
				c := in.readRune()
				if c == quote {
					break
				}
				if c == '\\' {
					if in.eof() {
						in.pos = val.pos
						in.Error("unexpected EOF in string")
					}
					in.readRune()
				}
			}
		}
		in.endToken(val)
		s, triple, err := unquote(val.tok)
		if err != nil {
			in.Error(fmt.Sprint(err))
		}
		val.str = s
		val.triple = triple
		return _STRING
	}

	// Checked all punctuation. Must be identifier token.
	if c := in.peekRune(); !isIdent(c) {
		in.Error(fmt.Sprintf("unexpected input character %#q", c))
	}

	// Look for raw Python block (class, def, if, etc at beginning of line) and pass through.
	if in.depth == 0 && in.pos.LineRune == 1 && hasPythonPrefix(in.remaining) {
		// Find end of Python block and advance input beyond it.
		// Have to loop calling readRune in order to maintain line number info.
		rest := in.skipPython(in.remaining)
		for len(in.remaining) > len(rest) {
			in.readRune()
		}
		return _PYTHON
	}

	// Scan over alphanumeric identifier.
	for {
		c := in.peekRune()
		if !isIdent(c) {
			break
		}
		in.readRune()
	}

	// Call endToken to set val.tok to identifier we just scanned,
	// so we can look to see if val.tok is a keyword.
	in.endToken(val)
	if k := keywordToken[val.tok]; k != 0 {
		return k
	}
	return _IDENT
}
// isIdent reports whether c is an identifier rune: an ASCII letter, digit,
// or underscore. We treat all non-ASCII runes as identifier runes.
func isIdent(c int) bool {
	switch {
	case '0' <= c && c <= '9':
		return true
	case 'A' <= c && c <= 'Z':
		return true
	case 'a' <= c && c <= 'z':
		return true
	case c == '_':
		return true
	default:
		return c >= 0x80
	}
}
// keywordToken records the special tokens for
// strings that should not be treated as ordinary identifiers.
// Lex returns the mapped token code instead of _IDENT when the
// scanned identifier appears here.
var keywordToken = map[string]int{
	"and":    _AND,
	"for":    _FOR,
	"if":     _IF,
	"else":   _ELSE,
	"in":     _IN,
	"is":     _IS,
	"lambda": _LAMBDA,
	"not":    _NOT,
	"or":     _OR,
}
// Python scanning.
// About 1% of BUILD files embed arbitrary Python into the file.
// We do not attempt to parse it. Instead, we lex just enough to scan
// beyond it, treating the Python block as an unintepreted blob.
// hasPythonPrefix reports whether p begins with a keyword that would
// introduce an uninterpreted Python block. Lex only consults this at the
// start of a top-level line (depth 0, column 1).
func hasPythonPrefix(p []byte) bool {
	for _, pre := range prefixes {
		if hasPrefixSpace(p, pre) {
			return true
		}
	}
	return false
}
// These keywords introduce uninterpreted Python blocks.
// Some ("for", "if") are also ordinary BUILD keywords, but they only
// trigger Python mode at the start of a top-level line (see Lex).
var prefixes = []string{
	"assert",
	"class",
	"def",
	"del",
	"for",
	"if",
	"try",
}
// hasPrefixSpace reports whether p begins with pre followed by a space,
// tab, or colon (so "def foo" and "try:" match, but "define" does not).
func hasPrefixSpace(p []byte, pre string) bool {
	if len(p) <= len(pre) {
		return false
	}
	// Check the separator first, then the prefix bytes themselves.
	switch p[len(pre)] {
	case ' ', '\t', ':':
		// acceptable separator
	default:
		return false
	}
	return string(p[:len(pre)]) == pre
}
// isBlankOrComment reports whether b starts with a line that is blank or
// holds only a comment: scanning past spaces, tabs, and carriage returns,
// the first significant byte is '#' or '\n' (or the input ends).
func isBlankOrComment(b []byte) bool {
	for _, c := range b {
		switch c {
		case '#', '\n':
			return true
		case ' ', '\t', '\r':
			// skip insignificant whitespace
		default:
			return false
		}
	}
	return true
}
// hasPythonContinuation reports whether p begins with a keyword that
// continues an uninterpreted Python block (a clause such as "except" or
// "else" that belongs to the preceding statement).
func hasPythonContinuation(p []byte) bool {
	for _, pre := range continuations {
		if hasPrefixSpace(p, pre) {
			return true
		}
	}
	return false
}
// These keywords continue uninterpreted Python blocks: an unindented line
// starting with one of them does not end the block in skipPython.
var continuations = []string{
	"except",
	"else",
}
// skipPython returns the data remaining after the uninterpreted
// Python block beginning at p. It does not advance the input position.
// (The only reason for the input receiver is to be able to call in.Error.)
func (in *input) skipPython(p []byte) []byte {
	quote := byte(0)     // if non-zero, the kind of quote we're in
	tripleQuote := false // if true, the quote is a triple quote
	depth := 0           // nesting depth for ( ) [ ] { }
	var rest []byte      // data after the Python block

	// Scan over input one byte at a time until we find
	// an unindented, non-blank, non-comment line
	// outside quoted strings and brackets.
	for i := 0; i < len(p); i++ {
		c := p[i]
		if quote != 0 && c == quote && !tripleQuote {
			// Closing quote of a single-quoted string.
			quote = 0
			continue
		}

		if quote != 0 && c == quote && tripleQuote && i+2 < len(p) && p[i+1] == quote && p[i+2] == quote {
			// Closing quotes of a triple-quoted string.
			i += 2
			quote = 0
			tripleQuote = false
			continue
		}

		if quote != 0 {
			if c == '\\' {
				i++ // skip escaped char
			}
			continue
		}

		if c == '\'' || c == '"' {
			// Opening quote; check whether it starts a triple-quoted string.
			if i+2 < len(p) && p[i+1] == c && p[i+2] == c {
				quote = c
				tripleQuote = true
				i += 2
				continue
			}
			quote = c
			continue
		}

		if depth == 0 && i > 0 && p[i-1] == '\n' && (i < 2 || p[i-2] != '\\') {
			// Start of a line that is not a backslash continuation.
			// Possible stopping point. Save the earliest one we find.
			if rest == nil {
				rest = p[i:]
			}
			if !isBlankOrComment(p[i:]) {
				if !hasPythonContinuation(p[i:]) && c != ' ' && c != '\t' {
					// Yes, stop here.
					break
				}
				// Not a stopping point after all.
				rest = nil
			}
		}

		switch c {
		case '#':
			// Skip comment.
			for i < len(p) && p[i] != '\n' {
				i++
			}

		case '(', '[', '{':
			depth++

		case ')', ']', '}':
			depth--
		}
	}
	if quote != 0 {
		in.Error("EOF scanning Python quoted string")
	}
	return rest
}
// Comment assignment.
// We build two lists of all subexpressions, preorder and postorder.
// The preorder list is ordered by start location, with outer expressions first.
// The postorder list is ordered by end location, with outer expressions last.
// We use the preorder list to assign each whole-line comment to the syntax
// immediately following it, and we use the postorder list to assign each
// end-of-line comment to the syntax immediately preceding it.

// order walks the expression adding it and its subexpressions to the
// preorder and postorder lists. A nil v is tolerated (and recorded in
// neither list) so callers can pass optional children unchecked.
func (in *input) order(v Expr) {
	if v != nil {
		in.pre = append(in.pre, v)
	}
	switch v := v.(type) {
	default:
		// A new Expr type was added without updating this walk;
		// fail loudly rather than silently mis-assigning comments.
		panic(fmt.Errorf("order: unexpected type %T", v))
	case nil:
		// nothing
	case *End:
		// nothing
	case *File:
		for _, stmt := range v.Stmt {
			in.order(stmt)
		}
	case *CommentBlock:
		// nothing
	case *CallExpr:
		in.order(v.X)
		for _, x := range v.List {
			in.order(x)
		}
		in.order(&v.End)
	case *PythonBlock:
		// nothing
	case *LiteralExpr:
		// nothing
	case *StringExpr:
		// nothing
	case *DotExpr:
		in.order(v.X)
	case *ListExpr:
		for _, x := range v.List {
			in.order(x)
		}
		in.order(&v.End)
	case *ListForExpr:
		in.order(v.X)
		for _, c := range v.For {
			in.order(c)
		}
		in.order(&v.End)
	case *SetExpr:
		for _, x := range v.List {
			in.order(x)
		}
		in.order(&v.End)
	case *ForClauseWithIfClausesOpt:
		in.order(v.For)
		for _, c := range v.Ifs {
			in.order(c)
		}
	case *ForClause:
		for _, name := range v.Var {
			in.order(name)
		}
		in.order(v.Expr)
	case *IfClause:
		in.order(v.Cond)
	case *KeyValueExpr:
		in.order(v.Key)
		in.order(v.Value)
	case *DictExpr:
		for _, x := range v.List {
			in.order(x)
		}
		in.order(&v.End)
	case *TupleExpr:
		for _, x := range v.List {
			in.order(x)
		}
		in.order(&v.End)
	case *UnaryExpr:
		in.order(v.X)
	case *BinaryExpr:
		in.order(v.X)
		in.order(v.Y)
	case *ConditionalExpr:
		in.order(v.Then)
		in.order(v.Test)
		in.order(v.Else)
	case *ParenExpr:
		in.order(v.X)
		in.order(&v.End)
	case *SliceExpr:
		in.order(v.X)
		in.order(v.Y)
		in.order(v.Z)
	case *IndexExpr:
		in.order(v.X)
		in.order(v.Y)
	case *LambdaExpr:
		for _, name := range v.Var {
			in.order(name)
		}
		in.order(v.Expr)
	}
	if v != nil {
		in.post = append(in.post, v)
	}
}
// assignComments attaches comments to nearby syntax.
// Whole-line comments become Comment.Before on the first expression
// (in preorder) starting at or after the comment; leftovers go at the
// end of the file. End-of-line comments become Comment.Suffix on the
// last expression (walking postorder in reverse) ending at or before
// the comment; leftovers go at the beginning of the file.
func (in *input) assignComments() {
	// Generate preorder and postorder lists.
	in.order(in.file)
	// Split into whole-line comments and suffix comments.
	var line, suffix []Comment
	for _, com := range in.comments {
		if com.Suffix {
			suffix = append(suffix, com)
		} else {
			line = append(line, com)
		}
	}
	// Assign line comments to syntax immediately following.
	for _, x := range in.pre {
		start, _ := x.Span()
		xcom := x.Comment()
		for len(line) > 0 && start.Byte >= line[0].Start.Byte {
			xcom.Before = append(xcom.Before, line[0])
			line = line[1:]
		}
	}
	// Remaining line comments go at end of file.
	in.file.After = append(in.file.After, line...)
	// Assign suffix comments to syntax immediately before.
	for i := len(in.post) - 1; i >= 0; i-- {
		x := in.post[i]
		// Do not assign suffix comments to file
		switch x.(type) {
		case *File:
			continue
		}
		_, end := x.Span()
		xcom := x.Comment()
		for len(suffix) > 0 && end.Byte <= suffix[len(suffix)-1].Start.Byte {
			xcom.Suffix = append(xcom.Suffix, suffix[len(suffix)-1])
			suffix = suffix[:len(suffix)-1]
		}
	}
	// We assigned suffix comments in reverse.
	// If multiple suffix comments were appended to the same
	// expression node, they are now in reverse. Fix that.
	for _, x := range in.post {
		reverseComments(x.Comment().Suffix)
	}
	// Remaining suffix comments go at beginning of file.
	in.file.Before = append(in.file.Before, suffix...)
}
// reverseComments reverses the []Comment list in place.
func reverseComments(list []Comment) {
	n := len(list)
	for i := 0; i < n/2; i++ {
		list[i], list[n-1-i] = list[n-1-i], list[i]
	}
}

684
vendor/github.com/bazelbuild/buildtools/build/parse.y generated vendored Normal file
View File

@ -0,0 +1,684 @@
// BUILD file parser.
// This is a yacc grammar. Its lexer is in lex.go.
//
// For a good introduction to writing yacc grammars, see
// Kernighan and Pike's book The Unix Programming Environment.
//
// The definitive yacc manual is
// Stephen C. Johnson and Ravi Sethi, "Yacc: A Parser Generator",
// online at http://plan9.bell-labs.com/sys/doc/yacc.pdf.
%{
package build
%}
// The generated parser puts these fields in a struct named yySymType.
// (The name %union is historical, but it is inaccurate for Go.)
%union {
// input tokens
tok string // raw input syntax
str string // decoding of quoted string
pos Position // position of token
triple bool // was string triple quoted?
// partial syntax trees
expr Expr
exprs []Expr
forc *ForClause
ifs []*IfClause
forifs *ForClauseWithIfClausesOpt
forsifs []*ForClauseWithIfClausesOpt
string *StringExpr
strings []*StringExpr
// supporting information
comma Position // position of trailing comma in list, if present
lastRule Expr // most recent rule, to attach line comments to
}
// These declarations set the type for a $ reference ($$, $1, $2, ...)
// based on the kind of symbol it refers to. Other fields can be referred
// to explicitly, as in $<tok>1.
//
// %token is for input tokens generated by the lexer.
// %type is for higher-level grammar rules defined here.
//
// It is possible to put multiple tokens per line, but it is easier to
// keep ordered using a sparser one-per-line list.
%token <pos> '%'
%token <pos> '('
%token <pos> ')'
%token <pos> '*'
%token <pos> '+'
%token <pos> ','
%token <pos> '-'
%token <pos> '.'
%token <pos> '/'
%token <pos> ':'
%token <pos> '<'
%token <pos> '='
%token <pos> '>'
%token <pos> '['
%token <pos> ']'
%token <pos> '{'
%token <pos> '}'
// By convention, yacc token names are all caps.
// However, we do not want to export them from the Go package
// we are creating, so prefix them all with underscores.
%token <pos> _ADDEQ // operator +=
%token <pos> _AND // keyword and
%token <pos> _COMMENT // top-level # comment
%token <pos> _EOF // end of file
%token <pos> _EQ // operator ==
%token <pos> _FOR // keyword for
%token <pos> _GE // operator >=
%token <pos> _IDENT // non-keyword identifier or number
%token <pos> _IF // keyword if
%token <pos> _ELSE // keyword else
%token <pos> _IN // keyword in
%token <pos> _IS // keyword is
%token <pos> _LAMBDA // keyword lambda
%token <pos> _LE // operator <=
%token <pos> _NE // operator !=
%token <pos> _NOT // keyword not
%token <pos> _OR // keyword or
%token <pos> _PYTHON // uninterpreted Python block
%token <pos> _STRING // quoted string
%type <pos> comma_opt
%type <expr> expr
%type <expr> expr_opt
%type <exprs> exprs
%type <exprs> exprs_opt
%type <forc> for_clause
%type <forifs> for_clause_with_if_clauses_opt
%type <forsifs> for_clauses_with_if_clauses_opt
%type <expr> ident
%type <exprs> idents
%type <ifs> if_clauses_opt
%type <exprs> stmts
%type <expr> stmt
%type <expr> keyvalue
%type <exprs> keyvalues
%type <exprs> keyvalues_no_comma
%type <string> string
%type <strings> strings
// Operator precedence.
// Operators listed lower in the table bind tighter.
// We tag rules with this fake, low precedence to indicate
// that when the rule is involved in a shift/reduce
// conflict, we prefer that the parser shift (try for a longer parse).
// Shifting is the default resolution anyway, but stating it explicitly
// silences yacc's warning for that specific case.
%left ShiftInstead
%left '\n'
%left _ASSERT
// '=' and '+=' have the lowest precedence
// e.g. "x = a if c > 0 else 'bar'"
// followed by
// 'if' and 'else' which have lower precedence than all other operators.
// e.g. "a, b if c > 0 else 'foo'" is either a tuple of (a,b) or 'foo'
// and not a tuple of "(a, (b if ... ))"
%left '=' _ADDEQ
%left _IF _ELSE
%left ','
%left ':'
%left _IN _NOT _IS
%left _OR
%left _AND
%left '<' '>' _EQ _NE _LE _GE
%left '+' '-'
%left '*' '/' '%'
%left '.' '[' '('
%right _UNARY
%left _STRING
%%
// Grammar rules.
//
// A note on names: if foo is a rule, then foos is a sequence of foos
// (with interleaved commas or other syntax as appropriate)
// and foo_opt is an optional foo.
file:
stmts _EOF
{
yylex.(*input).file = &File{Stmt: $1}
return 0
}
stmts:
{
$$ = nil
$<lastRule>$ = nil
}
| stmts stmt comma_opt semi_opt
{
// If this statement follows a comment block,
// attach the comments to the statement.
if cb, ok := $<lastRule>1.(*CommentBlock); ok {
$$ = $1
$$[len($1)-1] = $2
$2.Comment().Before = cb.After
$<lastRule>$ = $2
break
}
// Otherwise add to list.
$$ = append($1, $2)
$<lastRule>$ = $2
// Consider this input:
//
// foo()
// # bar
// baz()
//
// If we've just parsed baz(), the # bar is attached to
// foo() as an After comment. Make it a Before comment
// for baz() instead.
if x := $<lastRule>1; x != nil {
com := x.Comment()
$2.Comment().Before = com.After
com.After = nil
}
}
| stmts '\n'
{
// Blank line; sever last rule from future comments.
$$ = $1
$<lastRule>$ = nil
}
| stmts _COMMENT
{
$$ = $1
$<lastRule>$ = $<lastRule>1
if $<lastRule>$ == nil {
cb := &CommentBlock{Start: $2}
$$ = append($$, cb)
$<lastRule>$ = cb
}
com := $<lastRule>$.Comment()
com.After = append(com.After, Comment{Start: $2, Token: $<tok>2})
}
stmt:
expr %prec ShiftInstead
| _PYTHON
{
$$ = &PythonBlock{Start: $1, Token: $<tok>1}
}
semi_opt:
| semi_opt ';'
expr:
ident
| strings %prec ShiftInstead
{
if len($1) == 1 {
$$ = $1[0]
break
}
$$ = $1[0]
for _, x := range $1[1:] {
_, end := $$.Span()
$$ = binary($$, end, "+", x)
}
}
| '[' exprs_opt ']'
{
$$ = &ListExpr{
Start: $1,
List: $2,
Comma: $<comma>2,
End: End{Pos: $3},
ForceMultiLine: forceMultiLine($1, $2, $3),
}
}
| '[' expr for_clauses_with_if_clauses_opt ']'
{
exprStart, _ := $2.Span()
$$ = &ListForExpr{
Brack: "[]",
Start: $1,
X: $2,
For: $3,
End: End{Pos: $4},
ForceMultiLine: $1.Line != exprStart.Line,
}
}
| '(' expr for_clauses_with_if_clauses_opt ')'
{
exprStart, _ := $2.Span()
$$ = &ListForExpr{
Brack: "()",
Start: $1,
X: $2,
For: $3,
End: End{Pos: $4},
ForceMultiLine: $1.Line != exprStart.Line,
}
}
| '{' keyvalue for_clauses_with_if_clauses_opt '}'
{
exprStart, _ := $2.Span()
$$ = &ListForExpr{
Brack: "{}",
Start: $1,
X: $2,
For: $3,
End: End{Pos: $4},
ForceMultiLine: $1.Line != exprStart.Line,
}
}
| '{' keyvalues '}'
{
$$ = &DictExpr{
Start: $1,
List: $2,
Comma: $<comma>2,
End: End{Pos: $3},
ForceMultiLine: forceMultiLine($1, $2, $3),
}
}
| '{' exprs_opt '}'
{
$$ = &SetExpr{
Start: $1,
List: $2,
Comma: $<comma>2,
End: End{Pos: $3},
ForceMultiLine: forceMultiLine($1, $2, $3),
}
}
| '(' exprs_opt ')'
{
if len($2) == 1 && $<comma>2.Line == 0 {
// Just a parenthesized expression, not a tuple.
$$ = &ParenExpr{
Start: $1,
X: $2[0],
End: End{Pos: $3},
ForceMultiLine: forceMultiLine($1, $2, $3),
}
} else {
$$ = &TupleExpr{
Start: $1,
List: $2,
Comma: $<comma>2,
End: End{Pos: $3},
ForceCompact: forceCompact($1, $2, $3),
ForceMultiLine: forceMultiLine($1, $2, $3),
}
}
}
| expr '.' _IDENT
{
$$ = &DotExpr{
X: $1,
Dot: $2,
NamePos: $3,
Name: $<tok>3,
}
}
| expr '(' exprs_opt ')'
{
$$ = &CallExpr{
X: $1,
ListStart: $2,
List: $3,
End: End{Pos: $4},
ForceCompact: forceCompact($2, $3, $4),
ForceMultiLine: forceMultiLine($2, $3, $4),
}
}
| expr '(' expr for_clauses_with_if_clauses_opt ')'
{
$$ = &CallExpr{
X: $1,
ListStart: $2,
List: []Expr{
&ListForExpr{
Brack: "",
Start: $2,
X: $3,
For: $4,
End: End{Pos: $5},
},
},
End: End{Pos: $5},
}
}
| expr '[' expr ']'
{
$$ = &IndexExpr{
X: $1,
IndexStart: $2,
Y: $3,
End: $4,
}
}
| expr '[' expr_opt ':' expr_opt ']'
{
$$ = &SliceExpr{
X: $1,
SliceStart: $2,
Y: $3,
Colon: $4,
Z: $5,
End: $6,
}
}
| _LAMBDA exprs ':' expr
{
$$ = &LambdaExpr{
Lambda: $1,
Var: $2,
Colon: $3,
Expr: $4,
}
}
| '-' expr %prec _UNARY { $$ = unary($1, $<tok>1, $2) }
| _NOT expr %prec _UNARY { $$ = unary($1, $<tok>1, $2) }
| '*' expr %prec _UNARY { $$ = unary($1, $<tok>1, $2) }
| expr '*' expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr '%' expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr '/' expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr '+' expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr '-' expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr '<' expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr '>' expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr _EQ expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr _LE expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr _NE expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr _GE expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr '=' expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr _ADDEQ expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr _IN expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr _NOT _IN expr { $$ = binary($1, $2, "not in", $4) }
| expr _OR expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr _AND expr { $$ = binary($1, $2, $<tok>2, $3) }
| expr _IS expr
{
if b, ok := $3.(*UnaryExpr); ok && b.Op == "not" {
$$ = binary($1, $2, "is not", b.X)
} else {
$$ = binary($1, $2, $<tok>2, $3)
}
}
| expr _IF expr _ELSE expr
{
$$ = &ConditionalExpr{
Then: $1,
IfStart: $2,
Test: $3,
ElseStart: $4,
Else: $5,
}
}
expr_opt:
{
$$ = nil
}
| expr
// comma_opt is an optional comma. If the comma is present,
// the rule's value is the position of the comma. Otherwise
// the rule's value is the zero position. Tracking this
// lets us distinguish (x) and (x,).
comma_opt:
{
$$ = Position{}
}
| ','
keyvalue:
expr ':' expr {
$$ = &KeyValueExpr{
Key: $1,
Colon: $2,
Value: $3,
}
}
keyvalues_no_comma:
keyvalue
{
$$ = []Expr{$1}
}
| keyvalues_no_comma ',' keyvalue
{
$$ = append($1, $3)
}
keyvalues:
keyvalues_no_comma
{
$$ = $1
}
| keyvalues_no_comma ','
{
$$ = $1
}
exprs:
expr
{
$$ = []Expr{$1}
}
| exprs ',' expr
{
$$ = append($1, $3)
}
exprs_opt:
{
$$, $<comma>$ = nil, Position{}
}
| exprs comma_opt
{
$$, $<comma>$ = $1, $2
}
string:
_STRING
{
$$ = &StringExpr{
Start: $1,
Value: $<str>1,
TripleQuote: $<triple>1,
End: $1.add($<tok>1),
Token: $<tok>1,
}
}
strings:
string
{
$$ = []*StringExpr{$1}
}
| strings string
{
$$ = append($1, $2)
}
ident:
_IDENT
{
$$ = &LiteralExpr{Start: $1, Token: $<tok>1}
}
idents:
ident
{
$$ = []Expr{$1}
}
| idents ',' ident
{
$$ = append($1, $3)
}
for_clause:
_FOR idents _IN expr
{
$$ = &ForClause{
For: $1,
Var: $2,
In: $3,
Expr: $4,
}
}
| _FOR '(' idents ')' _IN expr
{
$$ = &ForClause{
For: $1,
Var: $3,
In: $5,
Expr: $6,
}
}
for_clause_with_if_clauses_opt:
for_clause if_clauses_opt {
$$ = &ForClauseWithIfClausesOpt{
For: $1,
Ifs: $2,
}
}
for_clauses_with_if_clauses_opt:
for_clause_with_if_clauses_opt
{
$$ = []*ForClauseWithIfClausesOpt{$1}
}
| for_clauses_with_if_clauses_opt for_clause_with_if_clauses_opt {
$$ = append($1, $2)
}
if_clauses_opt:
{
$$ = nil
}
| if_clauses_opt _IF expr
{
$$ = append($1, &IfClause{
If: $2,
Cond: $3,
})
}
%%
// Go helper code.

// unary returns a unary expression with the given
// position, operator, and subexpression.
// pos is the position of the operator token itself.
func unary(pos Position, op string, x Expr) Expr {
	return &UnaryExpr{
		OpStart: pos,
		Op:      op,
		X:       x,
	}
}
// binary returns a binary expression with the given
// operands, position, and operator.
// LineBreak is set when y began on a later source line than x ended,
// so the printer preserves the author's line break after the operator.
func binary(x Expr, pos Position, op string, y Expr) Expr {
	_, xend := x.Span()
	ystart, _ := y.Span()
	return &BinaryExpr{
		X:         x,
		OpStart:   pos,
		Op:        op,
		LineBreak: xend.Line < ystart.Line,
		Y:         y,
	}
}
// forceCompact returns the setting for the ForceCompact field for a call or tuple.
//
// NOTE 1: The field is called ForceCompact, not ForceSingleLine,
// because it only affects the formatting associated with the call or tuple syntax,
// not the formatting of the arguments. For example:
//
//	call([
//		1,
//		2,
//		3,
//	])
//
// is still a compact call even though it runs on multiple lines.
//
// In contrast the multiline form puts a linebreak after the (.
//
//	call(
//		[
//			1,
//			2,
//			3,
//		],
//	)
//
// NOTE 2: Because of NOTE 1, we cannot use start and end on the
// same line as a signal for compact mode: the formatting of an
// embedded list might move the end to a different line, which would
// then look different on rereading and cause buildifier not to be
// idempotent. Instead, we have to look at properties guaranteed
// to be preserved by the reformatting, namely that the opening
// paren and the first expression are on the same line and that
// each subsequent expression begins on the same line as the last
// one ended (no line breaks after comma).
func forceCompact(start Position, list []Expr, end Position) bool {
	if len(list) <= 1 {
		// The call or tuple will probably be compact anyway; don't force it.
		return false
	}
	// If there are any named arguments or non-string, non-literal
	// arguments, cannot force compact mode.
	line := start.Line
	for _, x := range list {
		// Note: start and end here shadow the parameters; inside the
		// loop they refer to the span of the current element x.
		start, end := x.Span()
		if start.Line != line {
			return false
		}
		line = end.Line
		switch x.(type) {
		case *LiteralExpr, *StringExpr, *UnaryExpr:
			// ok
		default:
			return false
		}
	}
	// Compact only if the closing bracket is on the same line as
	// the end of the last element.
	return end.Line == line
}
// forceMultiLine returns the setting for the ForceMultiLine field:
// whether a 0- or 1-element bracketed sequence must keep its
// multi-line layout from the source.
func forceMultiLine(start Position, list []Expr, end Position) bool {
	if len(list) > 1 {
		// The call will be multiline anyway, because it has multiple elements. Don't force it.
		return false
	}
	if len(list) == 0 {
		// Empty list: use position of brackets.
		return start.Line != end.Line
	}
	// Single-element list.
	// Check whether opening bracket is on different line than beginning of
	// element, or closing bracket is on different line than end of element.
	elemStart, elemEnd := list[0].Span()
	return start.Line != elemStart.Line || end.Line != elemEnd.Line
}

1306
vendor/github.com/bazelbuild/buildtools/build/parse.y.go generated vendored Executable file

File diff suppressed because it is too large Load Diff

614
vendor/github.com/bazelbuild/buildtools/build/print.go generated vendored Normal file
View File

@ -0,0 +1,614 @@
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Printing of syntax trees.
package build
import (
"bytes"
"fmt"
"strings"
)
// Format returns the formatted form of the given BUILD file.
func Format(f *File) []byte {
	var pr printer
	pr.file(f)
	return pr.Bytes()
}
// FormatString returns the string form of the given expression.
// A *File is printed as a whole file; anything else is printed as a
// single expression at the lowest precedence.
func FormatString(x Expr) string {
	var pr printer
	if f, ok := x.(*File); ok {
		pr.file(f)
	} else {
		pr.expr(x, precLow)
	}
	return pr.String()
}
// A printer collects the state during printing of a file or expression.
// Output accumulates in the embedded bytes.Buffer; comment holds
// end-of-line comments queued until the current line is terminated
// by newline.
type printer struct {
	bytes.Buffer           // output buffer
	comment      []Comment // pending end-of-line comments
	margin       int       // left margin (indent), a number of spaces
	depth        int       // nesting depth inside ( ) [ ] { }
}
// printf prints to the buffer.
// It writes via fmt.Fprintf into the embedded bytes.Buffer, which
// grows as needed and never returns an error.
func (p *printer) printf(format string, args ...interface{}) {
	fmt.Fprintf(p, format, args...)
}
// indent returns the position on the current line, in bytes, 0-indexed:
// the number of bytes written since the most recent newline.
func (p *printer) indent() int {
	out := p.Bytes()
	i := len(out)
	for i > 0 && out[i-1] != '\n' {
		i--
	}
	return len(out) - i
}
// newline ends the current line, flushing end-of-line comments.
// It must only be called when printing a newline is known to be safe:
// when not inside an expression or when p.depth > 0.
// To break a line inside an expression that might not be enclosed
// in brackets of some kind, use breakline instead.
// Queued comments in p.comment are emitted first, each on its own
// line at the current margin; the line is then trimmed of trailing
// spaces and re-indented to p.margin.
func (p *printer) newline() {
	if len(p.comment) > 0 {
		p.printf(" ")
		for i, com := range p.comment {
			if i > 0 {
				p.trim()
				p.printf("\n%*s", p.margin, "")
			}
			p.printf("%s", strings.TrimSpace(com.Token))
		}
		// Reset the queue but keep its backing storage.
		p.comment = p.comment[:0]
	}
	p.trim()
	p.printf("\n%*s", p.margin, "")
}
// breakline breaks the current line, inserting a continuation \ if needed.
// If no continuation \ is needed, breakline flushes end-of-line comments.
func (p *printer) breakline() {
	if p.depth > 0 {
		// Inside brackets: a plain newline is safe and flushes comments.
		p.newline()
		return
	}
	// Outside brackets: must end the line with a continuation \.
	// Cannot have both final \ and comments.
	p.printf(" \\\n%*s", p.margin, "")
}
// trim removes trailing spaces from the current line.
func (p *printer) trim() {
	// Truncate the buffer just past the last non-space byte.
	out := p.Bytes()
	keep := len(out)
	for keep > 0 && out[keep-1] == ' ' {
		keep--
	}
	p.Truncate(keep)
}
// file formats the given file into the print buffer.
// Leading (Before) and trailing (After) comments are printed as
// whole-line comments; blank lines between statements are suppressed
// only when compactStmt allows it.
func (p *printer) file(f *File) {
	// Comments preceding the whole file.
	for _, com := range f.Before {
		p.printf("%s", strings.TrimSpace(com.Token))
		p.newline()
	}
	for i, stmt := range f.Stmt {
		switch stmt := stmt.(type) {
		case *CommentBlock:
			// comments already handled
		case *PythonBlock:
			// Uninterpreted Python is emitted verbatim, preceded by
			// its own leading comments.
			for _, com := range stmt.Before {
				p.printf("%s", strings.TrimSpace(com.Token))
				p.newline()
			}
			p.printf("%s", stmt.Token) // includes trailing newline
		default:
			p.expr(stmt, precLow)
			p.newline()
		}
		for _, com := range stmt.Comment().After {
			p.printf("%s", strings.TrimSpace(com.Token))
			p.newline()
		}
		// Blank line between statements unless they form a compact pair.
		if i+1 < len(f.Stmt) && !compactStmt(stmt, f.Stmt[i+1]) {
			p.newline()
		}
	}
	// Comments after the last statement.
	for _, com := range f.After {
		p.printf("%s", strings.TrimSpace(com.Token))
		p.newline()
	}
}
// compactStmt reports whether the pair of statements s1, s2
// should be printed without an intervening blank line.
// We omit the blank line when both are subinclude statements
// and the second one has no leading comments.
func compactStmt(s1, s2 Expr) bool {
	if len(s2.Comment().Before) > 0 {
		return false
	}
	loadLike := func(x Expr) bool {
		return isCall(x, "subinclude") || isCall(x, "load")
	}
	return loadLike(s1) && loadLike(s2)
}
// isCall reports whether x is a call to a function with the given name.
func isCall(x Expr, name string) bool {
	call, ok := x.(*CallExpr)
	if !ok {
		return false
	}
	lit, ok := call.X.(*LiteralExpr)
	return ok && lit.Token == name
}
// Expression formatting.

// The expression formatter must introduce parentheses to force the
// meaning described by the parse tree. We preserve parentheses in the
// input, so extra parentheses are only needed if we have edited the tree.
//
// For example consider these expressions:
//	(1) "x" "y" % foo
//	(2) "x" + "y" % foo
//	(3) "x" + ("y" % foo)
//	(4) ("x" + "y") % foo
// When we parse (1), we represent the concatenation as an addition.
// However, if we print the addition back out without additional parens,
// as in (2), it has the same meaning as (3), which is not the original
// meaning. To preserve the original meaning we must add parens as in (4).
//
// To allow arbitrary rewrites to be formatted properly, we track full
// operator precedence while printing instead of just handling this one
// case of string concatenation.
//
// The precedences are assigned values low to high. A larger number
// binds tighter than a smaller number. All binary operators bind
// left-to-right.
const (
	precLow = iota // statement context; never forces parentheses
	precAssign
	precComma
	precColon
	precIn
	precOr
	precAnd
	precCmp
	precAdd
	precMultiply
	precSuffix // x.y, x[i], x[i:j], f(x)
	precUnary
	precConcat
)
// opPrec gives the precedence for operators found in a BinaryExpr.
// Operators not listed here (for example "in", "not in", "is") fall
// back to the map's zero value, which is precLow.
var opPrec = map[string]int{
	"=":   precAssign,
	"+=":  precAssign,
	"or":  precOr,
	"and": precAnd,
	"<":   precCmp,
	">":   precCmp,
	"==":  precCmp,
	"!=":  precCmp,
	"<=":  precCmp,
	">=":  precCmp,
	"+":   precAdd,
	"-":   precAdd,
	"*":   precMultiply,
	"/":   precMultiply,
	"%":   precMultiply,
}
// expr prints the expression v to the print buffer.
// The value outerPrec gives the precedence of the operator
// outside expr. If that operator binds tighter than v's operator,
// expr must introduce parentheses to preserve the meaning
// of the parse tree (see above).
// Before-comments are flushed first; suffix comments are queued on
// p.comment for the next newline.
func (p *printer) expr(v Expr, outerPrec int) {
	// Emit line-comments preceding this expression.
	// If we are in the middle of an expression but not inside ( ) [ ] { }
	// then we cannot just break the line: we'd have to end it with a \.
	// However, even then we can't emit line comments since that would
	// end the expression. This is only a concern if we have rewritten
	// the parse tree. If comments were okay before this expression in
	// the original input they're still okay now, in the absence of rewrites.
	//
	// TODO(bazel-team): Check whether it is valid to emit comments right now,
	// and if not, insert them earlier in the output instead, at the most
	// recent \n not following a \ line.
	if before := v.Comment().Before; len(before) > 0 {
		// Want to print a line comment.
		// Line comments must be at the current margin.
		p.trim()
		if p.indent() > 0 {
			// There's other text on the line. Start a new line.
			p.printf("\n")
		}
		// Re-indent to margin.
		p.printf("%*s", p.margin, "")
		for _, com := range before {
			p.printf("%s", strings.TrimSpace(com.Token))
			p.newline()
		}
	}
	// Do we introduce parentheses?
	// The result depends on the kind of expression.
	// Each expression type that might need parentheses
	// calls addParen with its own precedence.
	// If parentheses are necessary, addParen prints the
	// opening parenthesis and sets parenthesized so that
	// the code after the switch can print the closing one.
	parenthesized := false
	addParen := func(prec int) {
		if prec < outerPrec {
			p.printf("(")
			p.depth++
			parenthesized = true
		}
	}
	switch v := v.(type) {
	default:
		panic(fmt.Errorf("printer: unexpected type %T", v))
	case *LiteralExpr:
		p.printf("%s", v.Token)
	case *StringExpr:
		// If the Token is a correct quoting of Value, use it.
		// This preserves the specific escaping choices that
		// BUILD authors have made, and it also works around
		// b/7272572.
		if strings.HasPrefix(v.Token, `"`) {
			s, triple, err := unquote(v.Token)
			if s == v.Value && triple == v.TripleQuote && err == nil {
				p.printf("%s", v.Token)
				break
			}
		}
		p.printf("%s", quote(v.Value, v.TripleQuote))
	case *DotExpr:
		addParen(precSuffix)
		p.expr(v.X, precSuffix)
		p.printf(".%s", v.Name)
	case *IndexExpr:
		addParen(precSuffix)
		p.expr(v.X, precSuffix)
		p.printf("[")
		p.expr(v.Y, precLow)
		p.printf("]")
	case *KeyValueExpr:
		p.expr(v.Key, precLow)
		p.printf(": ")
		p.expr(v.Value, precLow)
	case *SliceExpr:
		// Either bound may be absent (nil): x[:j], x[i:], x[:].
		addParen(precSuffix)
		p.expr(v.X, precSuffix)
		p.printf("[")
		if v.Y != nil {
			p.expr(v.Y, precLow)
		}
		p.printf(":")
		if v.Z != nil {
			p.expr(v.Z, precLow)
		}
		p.printf("]")
	case *UnaryExpr:
		addParen(precUnary)
		if v.Op == "not" {
			p.printf("not ") // Requires a space after it.
		} else {
			p.printf("%s", v.Op)
		}
		p.expr(v.X, precUnary)
	case *LambdaExpr:
		addParen(precColon)
		p.printf("lambda ")
		for i, name := range v.Var {
			if i > 0 {
				p.printf(", ")
			}
			p.expr(name, precLow)
		}
		p.printf(": ")
		p.expr(v.Expr, precColon)
	case *BinaryExpr:
		// Precedence: use the precedence of the operator.
		// Since all binary expressions format left-to-right,
		// it is okay for the left side to reuse the same operator
		// without parentheses, so we use prec for v.X.
		// For the same reason, the right side cannot reuse the same
		// operator, or else a parse tree for a + (b + c), where the ( ) are
		// not present in the source, will format as a + b + c, which
		// means (a + b) + c. Treat the right expression as appearing
		// in a context one precedence level higher: use prec+1 for v.Y.
		//
		// Line breaks: if we are to break the line immediately after
		// the operator, introduce a margin at the current column,
		// so that the second operand lines up with the first one and
		// also so that neither operand can use space to the left.
		// If the operator is an =, indent the right side another 4 spaces.
		prec := opPrec[v.Op]
		addParen(prec)
		m := p.margin
		if v.LineBreak {
			p.margin = p.indent()
			if v.Op == "=" {
				p.margin += 4
			}
		}
		p.expr(v.X, prec)
		p.printf(" %s", v.Op)
		if v.LineBreak {
			p.breakline()
		} else {
			p.printf(" ")
		}
		p.expr(v.Y, prec+1)
		p.margin = m
	case *ParenExpr:
		p.seq("()", []Expr{v.X}, &v.End, modeParen, false, v.ForceMultiLine)
	case *CallExpr:
		addParen(precSuffix)
		p.expr(v.X, precSuffix)
		p.seq("()", v.List, &v.End, modeCall, v.ForceCompact, v.ForceMultiLine)
	case *ListExpr:
		p.seq("[]", v.List, &v.End, modeList, false, v.ForceMultiLine)
	case *SetExpr:
		p.seq("{}", v.List, &v.End, modeList, false, v.ForceMultiLine)
	case *TupleExpr:
		p.seq("()", v.List, &v.End, modeTuple, v.ForceCompact, v.ForceMultiLine)
	case *DictExpr:
		// Widen []*KeyValueExpr-like entries to []Expr for seq.
		var list []Expr
		for _, x := range v.List {
			list = append(list, x)
		}
		p.seq("{}", list, &v.End, modeDict, false, v.ForceMultiLine)
	case *ListForExpr:
		p.listFor(v)
	case *ConditionalExpr:
		addParen(precSuffix)
		p.expr(v.Then, precSuffix)
		p.printf(" if ")
		p.expr(v.Test, precSuffix)
		p.printf(" else ")
		p.expr(v.Else, precSuffix)
	}
	// Add closing parenthesis if needed.
	if parenthesized {
		p.depth--
		p.printf(")")
	}
	// Queue end-of-line comments for printing when we
	// reach the end of the line.
	p.comment = append(p.comment, v.Comment().Suffix...)
}
// A seqMode describes a formatting mode for a sequence of values,
// like a list or call arguments. The mode controls comma placement:
// a single-element tuple keeps its trailing comma, a parenthesized
// group takes none.
type seqMode int

const (
	_ seqMode = iota

	modeCall  // f(x)
	modeList  // [x]
	modeTuple // (x,)
	modeParen // (x)
	modeDict  // {x:y}
)
// seq formats a list of values inside a given bracket pair (brack = "()", "[]", "{}").
// The end node holds any trailing comments to be printed just before the
// closing bracket.
// The mode parameter specifies the sequence mode (see above).
// If multiLine is true, seq avoids the compact form even
// for 0- and 1-element sequences.
func (p *printer) seq(brack string, list []Expr, end *End, mode seqMode, forceCompact, forceMultiLine bool) {
	p.printf("%s", brack[:1])
	p.depth++
	// If there are line comments, force multiline
	// so we can print the comments before the closing bracket.
	for _, x := range list {
		if len(x.Comment().Before) > 0 {
			forceMultiLine = true
		}
	}
	if len(end.Before) > 0 {
		forceMultiLine = true
	}
	// Resolve possibly ambiguous call arguments explicitly
	// instead of depending on implicit resolution in logic below.
	if forceMultiLine {
		forceCompact = false
	}
	switch {
	case len(list) == 0 && !forceMultiLine:
		// Compact form: print nothing.
	case len(list) == 1 && !forceMultiLine:
		// Compact form.
		p.expr(list[0], precLow)
		// Tuple must end with comma, to mark it as a tuple.
		if mode == modeTuple {
			p.printf(",")
		}
	case forceCompact:
		// Compact form but multiple elements.
		for i, x := range list {
			if i > 0 {
				p.printf(", ")
			}
			p.expr(x, precLow)
		}
	default:
		// Multi-line form.
		p.margin += 4
		for i, x := range list {
			// If we are about to break the line before the first
			// element and there are trailing end-of-line comments
			// waiting to be printed, delay them and print them as
			// whole-line comments preceding that element.
			// Do this by printing a newline ourselves and positioning
			// so that the end-of-line comment, with the two spaces added,
			// will line up with the current margin.
			if i == 0 && len(p.comment) > 0 {
				p.printf("\n%*s", p.margin-2, "")
			}
			p.newline()
			p.expr(x, precLow)
			// Every element takes a trailing comma except the last
			// element of a parenthesized group.
			if mode != modeParen || i+1 < len(list) {
				p.printf(",")
			}
		}
		// Final comments.
		for _, com := range end.Before {
			p.newline()
			p.printf("%s", strings.TrimSpace(com.Token))
		}
		p.margin -= 4
		p.newline()
	}
	p.depth--
	p.printf("%s", brack[1:])
}
// listFor formats a ListForExpr (list comprehension).
// The single-line form is:
//	[x for y in z if c]
//
// and the multi-line form is:
//	[
//	    x
//	    for y in z
//	    if c
//	]
//
func (p *printer) listFor(v *ListForExpr) {
	// Multi-line is forced either explicitly or by comments that must
	// print before the closing bracket.
	multiLine := v.ForceMultiLine || len(v.End.Before) > 0

	// space breaks the line in multiline mode
	// or else prints a space.
	space := func() {
		if multiLine {
			p.breakline()
		} else {
			p.printf(" ")
		}
	}

	// Open the bracket, if any (an empty Brack means a bare comprehension).
	if v.Brack != "" {
		p.depth++
		p.printf("%s", v.Brack[:1])
	}

	if multiLine {
		if v.Brack != "" {
			p.margin += 4
		}
		p.newline()
	}

	// The body expression of the comprehension.
	p.expr(v.X, precLow)

	// One "for ... in ..." clause per element of v.For, each followed by
	// its "if" conditions. End-of-line comments are queued on p.comment
	// so the printer emits them after the current line.
	for _, c := range v.For {
		space()
		p.printf("for ")
		for i, name := range c.For.Var {
			if i > 0 {
				p.printf(", ")
			}
			p.expr(name, precLow)
		}
		p.printf(" in ")
		p.expr(c.For.Expr, precLow)
		p.comment = append(p.comment, c.For.Comment().Suffix...)

		for _, i := range c.Ifs {
			space()
			p.printf("if ")
			p.expr(i.Cond, precLow)
			p.comment = append(p.comment, i.Comment().Suffix...)
		}
		p.comment = append(p.comment, c.Comment().Suffix...)
	}

	if multiLine {
		// Comments attached just before the closing bracket.
		for _, com := range v.End.Before {
			p.newline()
			p.printf("%s", strings.TrimSpace(com.Token))
		}
		if v.Brack != "" {
			p.margin -= 4
		}
		p.newline()
	}

	if v.Brack != "" {
		p.printf("%s", v.Brack[1:])
		p.depth--
	}
}

262
vendor/github.com/bazelbuild/buildtools/build/quote.go generated vendored Normal file
View File

@ -0,0 +1,262 @@
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Python quoted strings.
package build
import (
"bytes"
"fmt"
"strconv"
"strings"
)
// unesc maps single-letter chars following \ to their actual values.
// Entries left at the zero byte are not recognized escapes.
var unesc = [256]byte{
	'a':  '\a',
	'b':  '\b',
	'f':  '\f',
	'n':  '\n',
	'r':  '\r',
	't':  '\t',
	'v':  '\v',
	'\\': '\\',
	'\'': '\'',
	'"':  '"',
}

// esc maps escape-worthy bytes to the char that should follow \.
// It is the inverse of unesc for the bytes that must be quoted on output.
var esc = [256]byte{
	'\a': 'a',
	'\b': 'b',
	'\f': 'f',
	'\n': 'n',
	'\r': 'r',
	'\t': 't',
	'\v': 'v',
	'\\': '\\',
	'\'': '\'',
	'"':  '"',
}

// notEsc is a list of characters that can follow a \ in a string value
// without having to escape the \. That is, since ( is in this list, we
// quote the Go string "foo\\(bar" as the Python literal "foo\(bar".
// This really does happen in BUILD files, especially in strings
// being used as shell arguments containing regular expressions.
const notEsc = " !#$%&()*+,-./:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~"
// unquote unquotes the quoted string, returning the actual
// string value, whether the original was triple-quoted, and
// an error describing invalid input.
func unquote(quoted string) (s string, triple bool, err error) {
	// Check for raw prefix: means don't interpret the inner \.
	raw := false
	if strings.HasPrefix(quoted, "r") {
		raw = true
		quoted = quoted[1:]
	}

	if len(quoted) < 2 {
		err = fmt.Errorf("string literal too short")
		return
	}

	if quoted[0] != '"' && quoted[0] != '\'' || quoted[0] != quoted[len(quoted)-1] {
		// FIX: previously err was set here without returning, so the
		// function kept parsing the malformed literal and returned a
		// computed s alongside the error. Return immediately instead.
		err = fmt.Errorf("string literal has invalid quotes")
		return
	}

	// Check for triple quoted string.
	quote := quoted[0]
	if len(quoted) >= 6 && quoted[1] == quote && quoted[2] == quote && quoted[:3] == quoted[len(quoted)-3:] {
		triple = true
		quoted = quoted[3 : len(quoted)-3]
	} else {
		quoted = quoted[1 : len(quoted)-1]
	}

	// Now quoted is the quoted data, but no quotes.
	// If we're in raw mode or there are no escapes, we're done.
	if raw || !strings.Contains(quoted, `\`) {
		s = quoted
		return
	}

	// Otherwise process quoted string.
	// Each iteration processes one escape sequence along with the
	// plain text leading up to it.
	var buf bytes.Buffer
	for {
		// Remove prefix before escape sequence.
		i := strings.Index(quoted, `\`)
		if i < 0 {
			i = len(quoted)
		}
		buf.WriteString(quoted[:i])
		quoted = quoted[i:]
		if len(quoted) == 0 {
			break
		}

		// Process escape sequence.
		if len(quoted) == 1 {
			err = fmt.Errorf(`truncated escape sequence \`)
			return
		}

		switch quoted[1] {
		default:
			// In Python, if \z (for some byte z) is not a known escape sequence
			// then it appears as literal text in the string.
			buf.WriteString(quoted[:2])
			quoted = quoted[2:]

		case '\n':
			// Ignore the escape and the line break.
			quoted = quoted[2:]

		case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '\'', '"':
			// One-char escape.
			buf.WriteByte(unesc[quoted[1]])
			quoted = quoted[2:]

		case '0', '1', '2', '3', '4', '5', '6', '7':
			// Octal escape, up to 3 digits.
			n := int(quoted[1] - '0')
			quoted = quoted[2:]
			for i := 1; i < 3; i++ {
				if len(quoted) == 0 || quoted[0] < '0' || '7' < quoted[0] {
					break
				}
				n = n*8 + int(quoted[0]-'0')
				quoted = quoted[1:]
			}
			if n >= 256 {
				// NOTE: Python silently discards the high bit,
				// so that '\541' == '\141' == 'a'.
				// Let's see if we can avoid doing that in BUILD files.
				err = fmt.Errorf(`invalid escape sequence \%03o`, n)
				return
			}
			buf.WriteByte(byte(n))

		case 'x':
			// Hexadecimal escape, exactly 2 digits.
			if len(quoted) < 4 {
				err = fmt.Errorf(`truncated escape sequence %s`, quoted)
				return
			}
			n, err1 := strconv.ParseInt(quoted[2:4], 16, 0)
			if err1 != nil {
				err = fmt.Errorf(`invalid escape sequence %s`, quoted[:4])
				return
			}
			buf.WriteByte(byte(n))
			quoted = quoted[4:]
		}
	}

	s = buf.String()
	return
}
// indexByte returns the index of the first instance of b in s, or else -1.
// It compares raw bytes, so it also finds bytes inside multi-byte runes.
func indexByte(s string, b byte) int {
	i := 0
	for i < len(s) {
		if s[i] == b {
			return i
		}
		i++
	}
	return -1
}
// hex is a list of the hexadecimal digits, for use in quoting.
// We always print lower-case hexadecimal.
const hex = "0123456789abcdef"

// quote returns the quoted form of the string value "x".
// If triple is true, quote uses the triple-quoted form """x""".
func quote(unquoted string, triple bool) string {
	q := `"`
	if triple {
		q = `"""`
	}

	var buf bytes.Buffer
	buf.WriteString(q)

	for i := 0; i < len(unquoted); i++ {
		c := unquoted[i]
		// Note on the condition: && binds tighter than ||, so a quote is
		// passed through when either the next byte or the one after it
		// exists and is not itself a quote.
		if c == '"' && triple && (i+1 < len(unquoted) && unquoted[i+1] != '"' || i+2 < len(unquoted) && unquoted[i+2] != '"') {
			// Can pass up to two quotes through, because they are followed by a non-quote byte.
			buf.WriteByte(c)
			if i+1 < len(unquoted) && unquoted[i+1] == '"' {
				buf.WriteByte(c)
				i++
			}
			continue
		}
		if triple && c == '\n' {
			// Can allow newline in triple-quoted string.
			buf.WriteByte(c)
			continue
		}
		if c == '\'' {
			// Can allow ' since we always use ".
			buf.WriteByte(c)
			continue
		}
		if c == '\\' {
			if i+1 < len(unquoted) && indexByte(notEsc, unquoted[i+1]) >= 0 {
				// Can pass \ through when followed by a byte that
				// known not to be a valid escape sequence and also
				// that does not trigger an escape sequence of its own.
				// Use this, because various BUILD files do.
				buf.WriteByte('\\')
				buf.WriteByte(unquoted[i+1])
				i++
				continue
			}
		}
		if esc[c] != 0 {
			// One-char escape from the esc table (\n, \t, \\, ...).
			buf.WriteByte('\\')
			buf.WriteByte(esc[c])
			continue
		}
		if c < 0x20 || c >= 0x80 {
			// BUILD files are supposed to be Latin-1, so escape all control and high bytes.
			// I'd prefer to use \x here, but Blaze does not implement
			// \x in quoted strings (b/7272572).
			buf.WriteByte('\\')
			buf.WriteByte(hex[c>>6]) // actually octal but reusing hex digits 0-7.
			buf.WriteByte(hex[(c>>3)&7])
			buf.WriteByte(hex[c&7])
			/*
				buf.WriteByte('\\')
				buf.WriteByte('x')
				buf.WriteByte(hex[c>>4])
				buf.WriteByte(hex[c&0xF])
			*/
			continue
		}
		// Ordinary printable byte: copy as-is.
		buf.WriteByte(c)
		continue
	}

	buf.WriteString(q)
	return buf.String()
}

View File

@ -0,0 +1,817 @@
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Rewriting of high-level (not purely syntactic) BUILD constructs.
package build
import (
"path"
"regexp"
"sort"
"strings"
"github.com/bazelbuild/buildtools/tables"
)
// For debugging: flag to disable certain rewrites.
var DisableRewrites []string

// disabled reports whether the named rewrite is disabled
// (i.e. whether name appears in DisableRewrites).
func disabled(name string) bool {
	for i := 0; i < len(DisableRewrites); i++ {
		if DisableRewrites[i] == name {
			return true
		}
	}
	return false
}
// For debugging: allow sorting of these lists even with sorting otherwise disabled.
var AllowSort []string

// allowedSort reports whether sorting is allowed in the named context
// (i.e. whether name appears in AllowSort).
func allowedSort(name string) bool {
	for i := 0; i < len(AllowSort); i++ {
		if AllowSort[i] == name {
			return true
		}
	}
	return false
}
// Rewrite applies the high-level Buildifier rewrites to f, modifying it in place.
// If info is non-nil, Rewrite updates it with information about the rewrite.
func Rewrite(f *File, info *RewriteInfo) {
	// Allocate an info so that helpers can assume it's there.
	if info == nil {
		info = new(RewriteInfo)
	}

	// Apply every registered rewrite, in declaration order,
	// unless disabled by name (see DisableRewrites).
	for _, r := range rewrites {
		if !disabled(r.name) {
			r.fn(f, info)
		}
	}
}
// RewriteInfo collects information about what Rewrite did.
type RewriteInfo struct {
	EditLabel      int      // number of label strings edited
	NameCall       int      // number of calls with argument names added
	SortCall       int      // number of call argument lists sorted
	SortStringList int      // number of string lists sorted
	UnsafeSort     int      // number of unsafe string lists sorted
	Log            []string // log entries - may change
}

// String returns a space-separated summary naming each rewrite category
// that fired at least once, in a fixed order; it is empty when nothing fired.
func (info *RewriteInfo) String() string {
	categories := []struct {
		count int
		tag   string
	}{
		{info.EditLabel, "label"},
		{info.NameCall, "callname"},
		{info.SortCall, "callsort"},
		{info.SortStringList, "listsort"},
		{info.UnsafeSort, "unsafesort"},
	}

	s := ""
	for _, c := range categories {
		if c.count > 0 {
			s += " " + c.tag
		}
	}
	if s != "" {
		s = s[1:] // drop the leading separator
	}
	return s
}
// rewrites is the list of all Buildifier rewrites, in the order in which they are applied.
// The order here matters: for example, label canonicalization must happen
// before sorting lists of strings.
// Each entry's name is also the key used by DisableRewrites / disabled.
var rewrites = []struct {
	name string
	fn   func(*File, *RewriteInfo)
}{
	{"callsort", sortCallArgs},
	{"label", fixLabels},
	{"listsort", sortStringLists},
	{"multiplus", fixMultilinePlus},
}
// leaveAlone reports whether any of the nodes on the stack are marked
// with a comment containing "buildifier: leave-alone".
// final, if non-nil, is checked in addition to the stack.
func leaveAlone(stk []Expr, final Expr) bool {
	for _, x := range stk {
		if leaveAlone1(x) {
			return true
		}
	}
	if final != nil && leaveAlone1(final) {
		return true
	}
	return false
}

// hasComment reports whether x is marked with a comment that,
// after being converted to lower case, contains the specified text.
// Only whole-line comments preceding x (Comment().Before) are examined.
func hasComment(x Expr, text string) bool {
	for _, com := range x.Comment().Before {
		if strings.Contains(strings.ToLower(com.Token), text) {
			return true
		}
	}
	return false
}

// leaveAlone1 reports whether x is marked with a comment containing
// "buildifier: leave-alone", case-insensitive.
func leaveAlone1(x Expr) bool {
	return hasComment(x, "buildifier: leave-alone")
}

// doNotSort reports whether x is marked with a comment containing
// "do not sort", case-insensitive.
func doNotSort(x Expr) bool {
	return hasComment(x, "do not sort")
}

// keepSorted reports whether x is marked with a comment containing
// "keep sorted", case-insensitive.
func keepSorted(x Expr) bool {
	return hasComment(x, "keep sorted")
}
// fixLabels rewrites labels into a canonical form.
//
// First, it joins labels written as string addition, turning
// "//x" + ":y" (usually split across multiple lines) into "//x:y".
//
// Second, it removes redundant target qualifiers, turning labels like
// "//third_party/m4:m4" into "//third_party/m4" as well as ones like
// "@foo//:foo" into "@foo".
//
func fixLabels(f *File, info *RewriteInfo) {
	// joinLabel rewrites "//x" + ":y" into "//x:y" in place,
	// merging the comments of all three nodes onto the surviving string.
	joinLabel := func(p *Expr) {
		add, ok := (*p).(*BinaryExpr)
		if !ok || add.Op != "+" {
			return
		}
		str1, ok := add.X.(*StringExpr)
		if !ok || !strings.HasPrefix(str1.Value, "//") || strings.Contains(str1.Value, " ") {
			return
		}
		str2, ok := add.Y.(*StringExpr)
		if !ok || strings.Contains(str2.Value, " ") {
			return
		}
		info.EditLabel++
		str1.Value += str2.Value

		// Deleting nodes add and str2.
		// Merge comments from add, str1, and str2 and save in str1.
		com1 := add.Comment()
		com2 := str1.Comment()
		com3 := str2.Comment()
		com1.Before = append(com1.Before, com2.Before...)
		com1.Before = append(com1.Before, com3.Before...)
		com1.Suffix = append(com1.Suffix, com2.Suffix...)
		com1.Suffix = append(com1.Suffix, com3.Suffix...)
		*str1.Comment() = *com1

		*p = str1
	}

	labelPrefix := "//"
	if tables.StripLabelLeadingSlashes {
		labelPrefix = ""
	}
	// labelRE matches label strings, e.g. @r//x/y/z:abc
	// where $1 is @r//x/y/z, $2 is @r//, $3 is r, $4 is z, $5 is abc.
	labelRE := regexp.MustCompile(`^(((?:@(\w+))?//|` + labelPrefix + `)(?:.+/)?([^:]*))(?::([^:]+))?$`)

	// shortenLabel canonicalizes a single string label in place,
	// counting each performed edit in info.
	shortenLabel := func(v Expr) {
		str, ok := v.(*StringExpr)
		if !ok {
			return
		}
		editPerformed := false

		if tables.StripLabelLeadingSlashes && strings.HasPrefix(str.Value, "//") {
			if path.Dir(f.Path) == "." || !strings.HasPrefix(str.Value, "//:") {
				editPerformed = true
				str.Value = str.Value[2:]
			}
		}

		if tables.ShortenAbsoluteLabelsToRelative {
			thisPackage := labelPrefix + path.Dir(f.Path)
			if str.Value == thisPackage {
				editPerformed = true
				str.Value = ":" + path.Base(str.Value)
			} else if strings.HasPrefix(str.Value, thisPackage+":") {
				editPerformed = true
				str.Value = str.Value[len(thisPackage):]
			}
		}

		m := labelRE.FindStringSubmatch(str.Value)
		if m == nil {
			return
		}
		if m[4] != "" && m[4] == m[5] { // e.g. //foo:foo
			editPerformed = true
			str.Value = m[1]
		} else if m[3] != "" && m[4] == "" && m[3] == m[5] { // e.g. @foo//:foo
			editPerformed = true
			str.Value = "@" + m[3]
		}
		if editPerformed {
			info.EditLabel++
		}
	}

	// Apply the two rewrites to every label-valued argument of every call.
	Walk(f, func(v Expr, stk []Expr) {
		switch v := v.(type) {
		case *CallExpr:
			if leaveAlone(stk, v) {
				return
			}
			for i := range v.List {
				if leaveAlone1(v.List[i]) {
					continue
				}
				as, ok := v.List[i].(*BinaryExpr)
				if !ok || as.Op != "=" {
					continue
				}
				key, ok := as.X.(*LiteralExpr)
				if !ok || !tables.IsLabelArg[key.Token] || tables.LabelBlacklist[callName(v)+"."+key.Token] {
					continue
				}
				if leaveAlone1(as.Y) {
					continue
				}
				if list, ok := as.Y.(*ListExpr); ok {
					for i := range list.List {
						if leaveAlone1(list.List[i]) {
							continue
						}
						joinLabel(&list.List[i])
						shortenLabel(list.List[i])
					}
				}
				if set, ok := as.Y.(*SetExpr); ok {
					for i := range set.List {
						if leaveAlone1(set.List[i]) {
							continue
						}
						joinLabel(&set.List[i])
						shortenLabel(set.List[i])
					}
				} else {
					// NOTE(review): this else attaches only to the SetExpr
					// check, so when as.Y is a *ListExpr (already handled
					// above) the list node itself is ALSO passed through
					// joinLabel/shortenLabel here. That appears harmless —
					// both helpers ignore nodes that are not BinaryExpr or
					// StringExpr — but looks unintended; confirm against
					// upstream buildtools.
					joinLabel(&as.Y)
					shortenLabel(as.Y)
				}
			}
		}
	})
}
// callName returns the name of the rule being called by call.
// If the call is not to a literal rule name, callName returns "".
func callName(call *CallExpr) string {
	if lit, ok := call.X.(*LiteralExpr); ok {
		return lit.Token
	}
	return ""
}
// sortCallArgs sorts lists of named arguments to a call.
// Only the trailing run of name=value arguments is reordered;
// positional arguments keep their places.
func sortCallArgs(f *File, info *RewriteInfo) {
	Walk(f, func(v Expr, stk []Expr) {
		call, ok := v.(*CallExpr)
		if !ok {
			return
		}
		if leaveAlone(stk, call) {
			return
		}
		rule := callName(call)
		if rule == "" {
			return
		}

		// Find the tail of the argument list with named arguments.
		start := len(call.List)
		for start > 0 && argName(call.List[start-1]) != "" {
			start--
		}

		// Record information about each arg into a sortable list.
		var args namedArgs
		for i, x := range call.List[start:] {
			name := argName(x)
			args = append(args, namedArg{ruleNamePriority(rule, name), name, i, x})
		}

		// Sort the list and put the args back in the new order.
		if sort.IsSorted(args) {
			return
		}
		info.SortCall++
		sort.Sort(args)
		for i, x := range args {
			call.List[start+i] = x.expr
		}
	})
}
// ruleNamePriority maps a rule argument name to its sorting priority.
// It could use the auto-generated per-rule tables but for now it just
// falls back to the original list.
// A rule-specific entry ("rule.arg") takes precedence over the
// argument-name-only entry.
func ruleNamePriority(rule, arg string) int {
	ruleArg := rule + "." + arg
	if val, ok := tables.NamePriority[ruleArg]; ok {
		return val
	}
	return tables.NamePriority[arg]
	/*
		list := ruleArgOrder[rule]
		if len(list) == 0 {
			return tables.NamePriority[arg]
		}
		for i, x := range list {
			if x == arg {
				return i
			}
		}
		return len(list)
	*/
}

// If x is of the form key=value, argName returns the string key.
// Otherwise argName returns "" (positional arguments have no name).
func argName(x Expr) string {
	if as, ok := x.(*BinaryExpr); ok && as.Op == "=" {
		if id, ok := as.X.(*LiteralExpr); ok {
			return id.Token
		}
	}
	return ""
}
// A namedArg records information needed for sorting
// a named call argument into its proper position.
type namedArg struct {
	priority int    // kind of name; first sort key
	name     string // name; second sort key
	index    int    // original index; final sort key
	expr     Expr   // name=value argument
}

// namedArgs is a slice of namedArg that implements sort.Interface.
type namedArgs []namedArg

func (x namedArgs) Len() int      { return len(x) }
func (x namedArgs) Swap(i, j int) { x[i], x[j] = x[j], x[i] }

// Less orders by priority, then name, then original position,
// so equal arguments keep their relative order (a stable tie-break).
func (x namedArgs) Less(i, j int) bool {
	a, b := x[i], x[j]
	switch {
	case a.priority != b.priority:
		return a.priority < b.priority
	case a.name != b.name:
		return a.name < b.name
	default:
		return a.index < b.index
	}
}
// sortStringLists sorts lists of string literals used as specific rule arguments.
// It handles both table-driven sortable attributes and explicit
// "keep sorted" annotations on assignments, dict entries, and lists.
func sortStringLists(f *File, info *RewriteInfo) {
	Walk(f, func(v Expr, stk []Expr) {
		switch v := v.(type) {
		case *CallExpr:
			if leaveAlone(stk, v) {
				return
			}
			rule := callName(v)
			for _, arg := range v.List {
				if leaveAlone1(arg) {
					continue
				}
				as, ok := arg.(*BinaryExpr)
				if !ok || as.Op != "=" || leaveAlone1(as) || doNotSort(as) {
					continue
				}
				key, ok := as.X.(*LiteralExpr)
				if !ok {
					continue
				}
				context := rule + "." + key.Token
				// Only attributes known to be sortable (and not
				// blacklisted for this rule) are considered.
				if !tables.IsSortableListArg[key.Token] || tables.SortableBlacklist[context] {
					continue
				}
				if disabled("unsafesort") && !tables.SortableWhitelist[context] && !allowedSort(context) {
					continue
				}
				sortStringList(as.Y, info, context)
			}
		case *BinaryExpr:
			if disabled("unsafesort") {
				return
			}
			// "keep sorted" comment on x = list forces sorting of list.
			as := v
			if as.Op == "=" && keepSorted(as) {
				sortStringList(as.Y, info, "?")
			}
		case *KeyValueExpr:
			if disabled("unsafesort") {
				return
			}
			// "keep sorted" before key: list also forces sorting of list.
			if keepSorted(v) {
				sortStringList(v.Value, info, "?")
			}
		case *ListExpr:
			if disabled("unsafesort") {
				return
			}
			// "keep sorted" comment above first list element also forces sorting of list.
			if len(v.List) > 0 && keepSorted(v.List[0]) {
				sortStringList(v, info, "?")
			}
		}
	})
}
// SortStringList sorts x, a list of strings.
// It is the exported entry point; no rewrite statistics are recorded.
func SortStringList(x Expr) {
	sortStringList(x, nil, "")
}

// sortStringList sorts x, a list of strings.
// The list is broken by non-strings and by blank lines and comments into chunks.
// Each chunk is sorted in place.
func sortStringList(x Expr, info *RewriteInfo, context string) {
	list, ok := x.(*ListExpr)
	if !ok || len(list.List) < 2 || doNotSort(list.List[0]) {
		return
	}
	forceSort := keepSorted(list.List[0])

	// TODO(bazel-team): Decide how to recognize lists that cannot
	// be sorted. Avoiding all lists with comments avoids sorting
	// lists that say explicitly, in some form or another, why they
	// cannot be sorted. For example, many cc_test rules require
	// certain order in their deps attributes.
	if !forceSort {
		if line, _ := hasComments(list); line {
			return
		}
	}

	// Sort chunks of the list with no intervening blank lines or comments.
	for i := 0; i < len(list.List); {
		if _, ok := list.List[i].(*StringExpr); !ok {
			i++
			continue
		}

		// j is the end of the current chunk: a maximal run of string
		// literals where no element after the first carries a
		// whole-line comment.
		j := i + 1
		for ; j < len(list.List); j++ {
			if str, ok := list.List[j].(*StringExpr); !ok || len(str.Before) > 0 {
				break
			}
		}

		var chunk []stringSortKey
		for index, x := range list.List[i:j] {
			chunk = append(chunk, makeSortKey(index, x.(*StringExpr)))
		}
		if !sort.IsSorted(byStringExpr(chunk)) || !isUniq(chunk) {
			if info != nil {
				info.SortStringList++
				if !tables.SortableWhitelist[context] {
					info.UnsafeSort++
					info.Log = append(info.Log, "sort:"+context)
				}
			}
			// Keep the comment attached to the first element pinned to
			// whichever element ends up first after sorting.
			before := chunk[0].x.Comment().Before
			chunk[0].x.Comment().Before = nil

			sort.Sort(byStringExpr(chunk))
			chunk = uniq(chunk)
			chunk[0].x.Comment().Before = before
			for offset, key := range chunk {
				list.List[i+offset] = key.x
			}
			// Close the gap left by any duplicates uniq removed.
			// NOTE(review): when duplicates ARE removed, the `i = j` below
			// overshoots the start of the now-shifted tail by the number of
			// removed elements, so a few following elements are skipped in
			// this pass — confirm against upstream buildtools.
			list.List = append(list.List[:(i+len(chunk))], list.List[j:]...)
		}

		i = j
	}
}
// uniq removes duplicates from a list, which must already be sorted.
// It edits the list in place (the result aliases sortedList's storage).
func uniq(sortedList []stringSortKey) []stringSortKey {
	out := sortedList[:0]
	for _, sk := range sortedList {
		n := len(out)
		if n > 0 && out[n-1].value == sk.value {
			continue // duplicate of the previous element
		}
		out = append(out, sk)
	}
	return out
}

// isUniq reports whether the sorted list only contains unique elements.
func isUniq(list []stringSortKey) bool {
	for i := 1; i < len(list); i++ {
		if list[i-1].value == list[i].value {
			return false
		}
	}
	return true
}
// If stk describes a call argument like rule(arg=...), callArgName
// returns the name of that argument, formatted as "rule.arg".
// It returns "" when the stack is too short, the argument is unnamed,
// or the enclosing call is not to a literal rule name.
func callArgName(stk []Expr) string {
	if len(stk) < 2 {
		return ""
	}
	arg := argName(stk[len(stk)-1])
	if arg == "" {
		return ""
	}
	if call, ok := stk[len(stk)-2].(*CallExpr); ok {
		if rule, ok := call.X.(*LiteralExpr); ok {
			return rule.Token + "." + arg
		}
	}
	return ""
}
// A stringSortKey records information about a single string literal to be
// sorted. The strings are first grouped into four phases: most strings,
// strings beginning with ":", strings beginning with "//", and strings
// beginning with "@". The next significant part of the comparison is the list
// of elements in the value, where elements are split at `.' and `:'. Finally
// we compare by value and break ties by original index.
type stringSortKey struct {
	phase    int      // grouping phase (0-3); primary key
	split    []string // value split on "." and ":"; secondary key
	value    string   // the raw string value
	original int      // original index in the chunk; final tie-break
	x        Expr     // the expression node to reposition
}

// makeSortKey computes the sort key for string x at position index.
func makeSortKey(index int, x *StringExpr) stringSortKey {
	key := stringSortKey{
		value:    x.Value,
		original: index,
		x:        x,
	}

	// Classify into a phase by label prefix; phase 0 is everything else.
	switch {
	case strings.HasPrefix(x.Value, ":"):
		key.phase = 1
	case strings.HasPrefix(x.Value, "//") || (tables.StripLabelLeadingSlashes && !strings.HasPrefix(x.Value, "@")):
		key.phase = 2
	case strings.HasPrefix(x.Value, "@"):
		key.phase = 3
	}

	key.split = strings.Split(strings.Replace(x.Value, ":", ".", -1), ".")
	return key
}
// byStringExpr implements sort.Interface for a list of stringSortKey.
type byStringExpr []stringSortKey

func (x byStringExpr) Len() int      { return len(x) }
func (x byStringExpr) Swap(i, j int) { x[i], x[j] = x[j], x[i] }

// Less compares keys by phase, then component-wise over the split pieces,
// then by number of pieces, then by raw value, and finally by original index.
func (x byStringExpr) Less(i, j int) bool {
	xi := x[i]
	xj := x[j]

	if xi.phase != xj.phase {
		return xi.phase < xj.phase
	}
	for k := 0; k < len(xi.split) && k < len(xj.split); k++ {
		if xi.split[k] != xj.split[k] {
			return xi.split[k] < xj.split[k]
		}
	}
	if len(xi.split) != len(xj.split) {
		return len(xi.split) < len(xj.split)
	}
	if xi.value != xj.value {
		return xi.value < xj.value
	}
	return xi.original < xj.original
}
// fixMultilinePlus turns
//
//	... +
//	[ ... ]
//
//	... +
//	call(...)
//
// into
//
//	... + [
//	    ...
//	]
//
//	... + call(
//	    ...
//	)
//
// which typically works better with our aggressively compact formatting.
func fixMultilinePlus(f *File, info *RewriteInfo) {

	// List manipulation helpers.
	// As a special case, we treat f([...]) as a list, mainly
	// for glob.

	// isList reports whether x is a list.
	var isList func(x Expr) bool
	isList = func(x Expr) bool {
		switch x := x.(type) {
		case *ListExpr:
			return true
		case *CallExpr:
			// A single-argument call wrapping a list counts (e.g. glob([...])).
			if len(x.List) == 1 {
				return isList(x.List[0])
			}
		}
		return false
	}

	// isMultiLine reports whether x is a multiline list.
	var isMultiLine func(Expr) bool
	isMultiLine = func(x Expr) bool {
		switch x := x.(type) {
		case *ListExpr:
			return x.ForceMultiLine || len(x.List) > 1
		case *CallExpr:
			if x.ForceMultiLine || len(x.List) > 1 && !x.ForceCompact {
				return true
			}
			if len(x.List) == 1 {
				return isMultiLine(x.List[0])
			}
		}
		return false
	}

	// forceMultiLine tries to force the list x to use a multiline form.
	// It reports whether it was successful.
	var forceMultiLine func(Expr) bool
	forceMultiLine = func(x Expr) bool {
		switch x := x.(type) {
		case *ListExpr:
			// Already multi line?
			if x.ForceMultiLine {
				return true
			}
			// If this is a list containing a list, force the
			// inner list to be multiline instead.
			if len(x.List) == 1 && forceMultiLine(x.List[0]) {
				return true
			}
			x.ForceMultiLine = true
			return true

		case *CallExpr:
			if len(x.List) == 1 {
				return forceMultiLine(x.List[0])
			}
		}
		return false
	}

	skip := map[Expr]bool{}
	Walk(f, func(v Expr, stk []Expr) {
		if skip[v] {
			return
		}
		bin, ok := v.(*BinaryExpr)
		if !ok || bin.Op != "+" {
			return
		}

		// Found a +.
		// w + x + y + z parses as ((w + x) + y) + z,
		// so chase down the left side to make a list of
		// all the things being added together, separated
		// by the BinaryExprs that join them.
		// Mark them as "skip" so that when Walk recurses
		// into the subexpressions, we won't reprocess them.
		var all []Expr
		for {
			all = append(all, bin.Y, bin)
			bin1, ok := bin.X.(*BinaryExpr)
			if !ok || bin1.Op != "+" {
				break
			}
			bin = bin1
			skip[bin] = true
		}
		all = append(all, bin.X)

		// Because the outermost expression was the
		// rightmost one, the list is backward. Reverse it.
		for i, j := 0, len(all)-1; i < j; i, j = i+1, j-1 {
			all[i], all[j] = all[j], all[i]
		}

		// The 'all' slice is alternating addends and BinaryExpr +'s:
		//	w, +, x, +, y, +, z
		// If there are no lists involved, don't rewrite anything.
		haveList := false
		for i := 0; i < len(all); i += 2 {
			if isList(all[i]) {
				haveList = true
				break
			}
		}
		if !haveList {
			return
		}

		// Okay, there are lists.
		// Consider each + next to a line break.
		for i := 1; i < len(all); i += 2 {
			bin := all[i].(*BinaryExpr)
			if !bin.LineBreak {
				continue
			}

			// We're going to break the line after the +.
			// If it is followed by a list, force that to be
			// multiline instead.
			if forceMultiLine(all[i+1]) {
				bin.LineBreak = false
				continue
			}

			// If the previous list was multiline already,
			// don't bother with the line break after
			// the +.
			if isMultiLine(all[i-1]) {
				bin.LineBreak = false
				continue
			}
		}
	})
}
// hasComments reports whether any comments are associated with
// the list or its elements. line is true when any whole-line comment
// (before/after the list, before the closing bracket, or before an
// element) is present; suffix is true when any end-of-line comment is.
func hasComments(list *ListExpr) (line, suffix bool) {
	com := list.Comment()
	line = len(com.Before) > 0 || len(com.After) > 0 || len(list.End.Before) > 0
	suffix = len(com.Suffix) > 0

	for _, elem := range list.List {
		c := elem.Comment()
		line = line || len(c.Before) > 0
		suffix = suffix || len(c.Suffix) > 0
	}
	return line, suffix
}

260
vendor/github.com/bazelbuild/buildtools/build/rule.go generated vendored Normal file
View File

@ -0,0 +1,260 @@
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Rule-level API for inspecting and modifying a build.File syntax tree.
package build
import "strings"
// A Rule represents a single BUILD rule.
type Rule struct {
	Call *CallExpr // the underlying call expression, e.g. go_library(...)
}

// Rules returns the rules in the file of the given kind (such as "go_library").
// If kind == "", Rules returns all rules in the file.
func (f *File) Rules(kind string) []*Rule {
	var all []*Rule

	// Every top-level call statement is a candidate rule.
	for _, stmt := range f.Stmt {
		call, ok := stmt.(*CallExpr)
		if !ok {
			continue
		}
		rule := &Rule{call}
		if kind != "" && rule.Kind() != kind {
			continue
		}
		all = append(all, rule)
	}

	return all
}
// RuleAt returns the rule in the file whose kind expression spans the
// specified line, or nil if no such rule.
func (f *File) RuleAt(linenum int) *Rule {
	for _, stmt := range f.Stmt {
		call, ok := stmt.(*CallExpr)
		if !ok {
			continue
		}
		if start, end := call.X.Span(); start.Line <= linenum && linenum <= end.Line {
			return &Rule{call}
		}
	}
	return nil
}
// DelRules removes rules with the given kind and name from the file.
// An empty kind matches all kinds; an empty name matches all names.
// It returns the number of rules that were deleted.
func (f *File) DelRules(kind, name string) int {
	// Filter f.Stmt in place, keeping non-matching statements.
	kept := f.Stmt[:0]
	for _, stmt := range f.Stmt {
		if call, ok := stmt.(*CallExpr); ok {
			r := &Rule{call}
			kindMatch := kind == "" || r.Kind() == kind
			nameMatch := name == "" || r.AttrString("name") == name
			if kindMatch && nameMatch {
				continue // delete this rule
			}
		}
		kept = append(kept, stmt)
	}

	deleted := len(f.Stmt) - len(kept)
	f.Stmt = kept
	return deleted
}
// Kind returns the rule's kind (such as "go_library").
// The kind of the rule may be given by a literal or it may be a sequence of dot expressions that
// begins with a literal, if the call expression does not conform to either of these forms, an
// empty string will be returned
func (r *Rule) Kind() string {
	var names []string
	expr := r.Call.X

	// Unwind the chain of DotExprs, collecting names innermost-last.
	for {
		x, ok := expr.(*DotExpr)
		if !ok {
			break
		}
		names = append(names, x.Name)
		expr = x.X
	}

	// The innermost expression must be the leading literal identifier.
	x, ok := expr.(*LiteralExpr)
	if !ok {
		return ""
	}
	names = append(names, x.Token)

	// Reverse the elements since the deepest expression contains the leading literal
	for l, r := 0, len(names)-1; l < r; l, r = l+1, r-1 {
		names[l], names[r] = names[r], names[l]
	}

	return strings.Join(names, ".")
}
// SetKind changes the rule's kind (such as "go_library").
// A dotted kind like "native.cc_library" becomes a chain of DotExprs
// over a leading literal.
func (r *Rule) SetKind(kind string) {
	names := strings.Split(kind, ".")
	expr := Expr(&LiteralExpr{Token: names[0]})
	for _, name := range names[1:] {
		expr = &DotExpr{X: expr, Name: name}
	}
	r.Call.X = expr
}
// Name returns the rule's target name.
// If the rule has no target name, Name returns the empty string.
func (r *Rule) Name() string {
	return r.AttrString("name")
}

// AttrKeys returns the keys of all the rule's attributes,
// in the order they appear in the call.
func (r *Rule) AttrKeys() []string {
	var keys []string
	for _, expr := range r.Call.List {
		// Attributes are the key=value arguments of the call.
		if binExpr, ok := expr.(*BinaryExpr); ok && binExpr.Op == "=" {
			if keyExpr, ok := binExpr.X.(*LiteralExpr); ok {
				keys = append(keys, keyExpr.Token)
			}
		}
	}
	return keys
}
// AttrDefn returns the BinaryExpr defining the rule's attribute with the given key.
// That is, the result is a *BinaryExpr with Op == "=".
// If the rule has no such attribute, AttrDefn returns nil.
func (r *Rule) AttrDefn(key string) *BinaryExpr {
	for _, kv := range r.Call.List {
		as, ok := kv.(*BinaryExpr)
		if !ok || as.Op != "=" {
			continue
		}
		k, ok := as.X.(*LiteralExpr)
		if !ok || k.Token != key {
			continue
		}
		return as
	}
	return nil
}

// Attr returns the value of the rule's attribute with the given key
// (such as "name" or "deps").
// If the rule has no such attribute, Attr returns nil.
func (r *Rule) Attr(key string) Expr {
	as := r.AttrDefn(key)
	if as == nil {
		return nil
	}
	return as.Y
}
// DelAttr deletes the rule's attribute with the named key.
// It returns the old value of the attribute, or nil if the attribute was not found.
func (r *Rule) DelAttr(key string) Expr {
	list := r.Call.List
	for i, kv := range list {
		as, ok := kv.(*BinaryExpr)
		if !ok || as.Op != "=" {
			continue
		}
		k, ok := as.X.(*LiteralExpr)
		if !ok || k.Token != key {
			continue
		}
		// Shift the remaining arguments left and truncate the list.
		copy(list[i:], list[i+1:])
		r.Call.List = list[:len(list)-1]
		return as.Y
	}
	return nil
}
// SetAttr sets the rule's attribute with the given key to value.
// If the rule has no attribute with the key, SetAttr appends
// one to the end of the rule's attribute list.
func (r *Rule) SetAttr(key string, val Expr) {
	if as := r.AttrDefn(key); as != nil {
		as.Y = val
		return
	}

	assign := &BinaryExpr{
		X:  &LiteralExpr{Token: key},
		Op: "=",
		Y:  val,
	}
	r.Call.List = append(r.Call.List, assign)
}
// AttrLiteral returns the literal form of the rule's attribute
// with the given key (such as "cc_api_version"), only when
// that value is an identifier or number.
// If the rule has no such attribute or the attribute is not an identifier or number,
// AttrLiteral returns "".
func (r *Rule) AttrLiteral(key string) string {
lit, ok := r.Attr(key).(*LiteralExpr)
if !ok {
return ""
}
return lit.Token
}
// AttrString returns the value of the rule's attribute
// with the given key (such as "name"), as a string.
// If the rule has no such attribute or the attribute has a non-string value,
// AttrString returns the empty string.
func (r *Rule) AttrString(key string) string {
	if str, ok := r.Attr(key).(*StringExpr); ok {
		return str.Value
	}
	return ""
}
// AttrStrings returns the value of the rule's attribute
// with the given key (such as "srcs"), as a []string.
// If the rule has no such attribute or the attribute is not
// a list of strings, AttrStrings returns a nil slice.
func (r *Rule) AttrStrings(key string) []string {
	value := r.Attr(key)
	return Strings(value)
}
// Strings returns expr as a []string.
// If expr is not a list of string literals,
// Strings returns a nil slice instead.
// If expr is an empty list of string literals,
// returns a non-nil empty slice.
// (this allows differentiating between these two cases)
func Strings(expr Expr) []string {
	list, ok := expr.(*ListExpr)
	if !ok {
		return nil
	}
	// Deliberately non-nil even when the list is empty, so callers
	// can tell "empty list" apart from "not a string list".
	result := make([]string, 0, len(list.List))
	for _, elem := range list.List {
		s, ok := elem.(*StringExpr)
		if !ok {
			return nil
		}
		result = append(result, s.Value)
	}
	return result
}

423
vendor/github.com/bazelbuild/buildtools/build/syntax.go generated vendored Normal file
View File

@ -0,0 +1,423 @@
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package build implements parsing and printing of BUILD files.
package build
// Syntax data structure definitions.
import (
"strings"
"unicode/utf8"
)
// A Position describes the position between two bytes of input.
type Position struct {
	Line     int // line in input (starting at 1)
	LineRune int // rune in line (starting at 1)
	Byte     int // byte in input (starting at 0)
}

// add returns the position at the end of s, assuming it starts at p.
func (p Position) add(s string) Position {
	p.Byte += len(s)
	if idx := strings.LastIndex(s, "\n"); idx >= 0 {
		// s spans one or more line breaks: advance the line count and
		// restart the rune column at the text after the final newline.
		p.Line += strings.Count(s, "\n")
		p.LineRune = 1
		s = s[idx+1:]
	}
	p.LineRune += utf8.RuneCountInString(s)
	return p
}
// An Expr represents an input element.
type Expr interface {
// Span returns the start and end position of the expression,
// excluding leading or trailing comments.
Span() (start, end Position)
// Comment returns the comments attached to the expression.
// This method would normally be named 'Comments' but that
// would interfere with embedding a type of the same name.
Comment() *Comments
}
// A Comment represents a single # comment.
type Comment struct {
Start Position
Token string // without trailing newline
Suffix bool // an end of line (not whole line) comment
}
// Comments collects the comments associated with an expression.
type Comments struct {
Before []Comment // whole-line comments before this expression
Suffix []Comment // end-of-line comments after this expression
// For top-level expressions only, After lists whole-line
// comments following the expression.
After []Comment
}
// Comment returns the receiver. This isn't useful by itself, but
// a Comments struct is embedded into all the expression
// implementation types, and this gives each of those a Comment
// method to satisfy the Expr interface.
func (c *Comments) Comment() *Comments {
return c
}
// A File represents an entire BUILD file.
type File struct {
Path string // file path, relative to workspace directory
Comments
Stmt []Expr
}
func (x *File) Span() (start, end Position) {
if len(x.Stmt) == 0 {
return
}
start, _ = x.Stmt[0].Span()
_, end = x.Stmt[len(x.Stmt)-1].Span()
return start, end
}
// A CommentBlock represents a top-level block of comments separate
// from any rule.
type CommentBlock struct {
Comments
Start Position
}
func (x *CommentBlock) Span() (start, end Position) {
return x.Start, x.Start
}
// A PythonBlock represents a blob of Python code, typically a def or for loop.
type PythonBlock struct {
Comments
Start Position
Token string // raw Python code, including final newline
}
func (x *PythonBlock) Span() (start, end Position) {
return x.Start, x.Start.add(x.Token)
}
// A LiteralExpr represents a literal identifier or number.
type LiteralExpr struct {
Comments
Start Position
Token string // identifier token
}
func (x *LiteralExpr) Span() (start, end Position) {
return x.Start, x.Start.add(x.Token)
}
// A StringExpr represents a single literal string.
type StringExpr struct {
Comments
Start Position
Value string // string value (decoded)
TripleQuote bool // triple quote output
End Position
// To allow specific formatting of string literals,
// at least within our requirements, record the
// preferred form of Value. This field is a hint:
// it is only used if it is a valid quoted form for Value.
Token string
}
func (x *StringExpr) Span() (start, end Position) {
return x.Start, x.End
}
// An End represents the end of a parenthesized or bracketed expression.
// It is a place to hang comments.
type End struct {
Comments
Pos Position
}
func (x *End) Span() (start, end Position) {
return x.Pos, x.Pos.add(")")
}
// A CallExpr represents a function call expression: X(List).
type CallExpr struct {
Comments
X Expr
ListStart Position // position of (
List []Expr
End // position of )
ForceCompact bool // force compact (non-multiline) form when printing
ForceMultiLine bool // force multiline form when printing
}
func (x *CallExpr) Span() (start, end Position) {
start, _ = x.X.Span()
return start, x.End.Pos.add(")")
}
// A DotExpr represents a field selector: X.Name.
type DotExpr struct {
Comments
X Expr
Dot Position
NamePos Position
Name string
}
func (x *DotExpr) Span() (start, end Position) {
start, _ = x.X.Span()
return start, x.NamePos.add(x.Name)
}
// A ListForExpr represents a list comprehension expression: [X for ... if ...].
type ListForExpr struct {
Comments
ForceMultiLine bool // split expression across multiple lines
Brack string // "", "()", or "[]"
Start Position
X Expr
For []*ForClauseWithIfClausesOpt
End
}
func (x *ListForExpr) Span() (start, end Position) {
return x.Start, x.End.Pos.add("]")
}
// A ForClause represents a for clause in a list comprehension: for Var in Expr.
type ForClause struct {
Comments
For Position
Var []Expr
In Position
Expr Expr
}
func (x *ForClause) Span() (start, end Position) {
_, end = x.Expr.Span()
return x.For, end
}
// An IfClause represents an if clause in a list comprehension: if Cond.
type IfClause struct {
Comments
If Position
Cond Expr
}
func (x *IfClause) Span() (start, end Position) {
_, end = x.Cond.Span()
return x.If, end
}
// A ForClauseWithIfClausesOpt represents a for clause in a list comprehension followed by optional
// if expressions: for ... in ... [if ... if ...]
type ForClauseWithIfClausesOpt struct {
Comments
For *ForClause
Ifs []*IfClause
}
func (x *ForClauseWithIfClausesOpt) Span() (start, end Position) {
start, end = x.For.Span()
if len(x.Ifs) > 0 {
_, end = x.Ifs[len(x.Ifs)-1].Span()
}
return start, end
}
// A KeyValueExpr represents a dictionary entry: Key: Value.
type KeyValueExpr struct {
Comments
Key Expr
Colon Position
Value Expr
}
func (x *KeyValueExpr) Span() (start, end Position) {
start, _ = x.Key.Span()
_, end = x.Value.Span()
return start, end
}
// A DictExpr represents a dictionary literal: { List }.
type DictExpr struct {
Comments
Start Position
List []Expr // all *KeyValueExprs
Comma Position // position of trailing comma, if any
End
ForceMultiLine bool // force multiline form when printing
}
func (x *DictExpr) Span() (start, end Position) {
return x.Start, x.End.Pos.add("}")
}
// A ListExpr represents a list literal: [ List ].
type ListExpr struct {
Comments
Start Position
List []Expr
Comma Position // position of trailing comma, if any
End
ForceMultiLine bool // force multiline form when printing
}
func (x *ListExpr) Span() (start, end Position) {
return x.Start, x.End.Pos.add("]")
}
// A SetExpr represents a set literal: { List }.
type SetExpr struct {
Comments
Start Position
List []Expr
Comma Position // position of trailing comma, if any
End
ForceMultiLine bool // force multiline form when printing
}
func (x *SetExpr) Span() (start, end Position) {
return x.Start, x.End.Pos.add("}")
}
// A TupleExpr represents a tuple literal: (List)
type TupleExpr struct {
Comments
Start Position
List []Expr
Comma Position // position of trailing comma, if any
End
ForceCompact bool // force compact (non-multiline) form when printing
ForceMultiLine bool // force multiline form when printing
}
func (x *TupleExpr) Span() (start, end Position) {
return x.Start, x.End.Pos.add(")")
}
// A UnaryExpr represents a unary expression: Op X.
type UnaryExpr struct {
Comments
OpStart Position
Op string
X Expr
}
func (x *UnaryExpr) Span() (start, end Position) {
_, end = x.X.Span()
return x.OpStart, end
}
// A BinaryExpr represents a binary expression: X Op Y.
type BinaryExpr struct {
Comments
X Expr
OpStart Position
Op string
LineBreak bool // insert line break between Op and Y
Y Expr
}
func (x *BinaryExpr) Span() (start, end Position) {
start, _ = x.X.Span()
_, end = x.Y.Span()
return start, end
}
// A ParenExpr represents a parenthesized expression: (X).
type ParenExpr struct {
Comments
Start Position
X Expr
End
ForceMultiLine bool // insert line break after opening ( and before closing )
}
func (x *ParenExpr) Span() (start, end Position) {
return x.Start, x.End.Pos.add(")")
}
// A SliceExpr represents a slice expression: X[Y:Z].
type SliceExpr struct {
Comments
X Expr
SliceStart Position
Y Expr
Colon Position
Z Expr
End Position
}
func (x *SliceExpr) Span() (start, end Position) {
start, _ = x.X.Span()
return start, x.End
}
// An IndexExpr represents an index expression: X[Y].
type IndexExpr struct {
Comments
X Expr
IndexStart Position
Y Expr
End Position
}
func (x *IndexExpr) Span() (start, end Position) {
start, _ = x.X.Span()
return start, x.End
}
// A LambdaExpr represents a lambda expression: lambda Var: Expr.
type LambdaExpr struct {
Comments
Lambda Position
Var []Expr
Colon Position
Expr Expr
}
func (x *LambdaExpr) Span() (start, end Position) {
_, end = x.Expr.Span()
return x.Lambda, end
}
// ConditionalExpr represents the conditional: X if TEST else ELSE.
type ConditionalExpr struct {
Comments
Then Expr
IfStart Position
Test Expr
ElseStart Position
Else Expr
}
// Span returns the start and end position of the expression,
// excluding leading or trailing comments.
func (x *ConditionalExpr) Span() (start, end Position) {
start, _ = x.Then.Span()
_, end = x.Else.Span()
return start, end
}

132
vendor/github.com/bazelbuild/buildtools/build/walk.go generated vendored Normal file
View File

@ -0,0 +1,132 @@
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package build
// Walk walks the expression tree v, calling f on all subexpressions
// in a preorder traversal.
//
// The stk argument is the stack of expressions in the recursion above x,
// from outermost to innermost.
//
func Walk(v Expr, f func(x Expr, stk []Expr)) {
	var stack []Expr
	// Adapt the read-only visitor to walk1's rewriting signature;
	// returning nil means "leave the node unchanged".
	visit := func(x Expr, stk []Expr) Expr {
		f(x, stk)
		return nil
	}
	walk1(&v, &stack, visit)
}
// Edit walks the expression tree v, calling f on all subexpressions
// in a preorder traversal. If f returns a non-nil value, the tree is mutated.
// The new value replaces the old one.
//
// The stk argument is the stack of expressions in the recursion above x,
// from outermost to innermost.
//
func Edit(v Expr, f func(x Expr, stk []Expr) Expr) Expr {
	var stack []Expr
	return walk1(&v, &stack, f)
}
// walk1 is the actual implementation of Walk and Edit.
// It has the same signature and meaning as Walk,
// except that it maintains in *stack the current stack
// of nodes. Using a pointer to a slice here ensures that
// as the stack grows and shrinks the storage can be
// reused for the next growth.
func walk1(v *Expr, stack *[]Expr, f func(x Expr, stk []Expr) Expr) Expr {
	if v == nil {
		return nil
	}
	// Visit the node itself first (preorder); a non-nil result from f
	// replaces the node in place before its children are walked.
	if res := f(*v, *stack); res != nil {
		*v = res
	}
	*stack = append(*stack, *v)
	switch v := (*v).(type) {
	case *File:
		// Walk by index so that replacements made by f are written back
		// into v.Stmt; ranging by value would hand walk1 the address of
		// a per-iteration copy and silently discard edits.
		for i := range v.Stmt {
			walk1(&v.Stmt[i], stack, f)
		}
	case *DotExpr:
		walk1(&v.X, stack, f)
	case *IndexExpr:
		walk1(&v.X, stack, f)
		walk1(&v.Y, stack, f)
	case *KeyValueExpr:
		walk1(&v.Key, stack, f)
		walk1(&v.Value, stack, f)
	case *SliceExpr:
		walk1(&v.X, stack, f)
		if v.Y != nil {
			walk1(&v.Y, stack, f)
		}
		if v.Z != nil {
			walk1(&v.Z, stack, f)
		}
	case *ParenExpr:
		walk1(&v.X, stack, f)
	case *UnaryExpr:
		walk1(&v.X, stack, f)
	case *BinaryExpr:
		walk1(&v.X, stack, f)
		walk1(&v.Y, stack, f)
	case *LambdaExpr:
		for i := range v.Var {
			walk1(&v.Var[i], stack, f)
		}
		walk1(&v.Expr, stack, f)
	case *CallExpr:
		walk1(&v.X, stack, f)
		for i := range v.List {
			walk1(&v.List[i], stack, f)
		}
	case *ListExpr:
		for i := range v.List {
			walk1(&v.List[i], stack, f)
		}
	case *SetExpr:
		for i := range v.List {
			walk1(&v.List[i], stack, f)
		}
	case *TupleExpr:
		for i := range v.List {
			walk1(&v.List[i], stack, f)
		}
	case *DictExpr:
		for i := range v.List {
			walk1(&v.List[i], stack, f)
		}
	case *ListForExpr:
		walk1(&v.X, stack, f)
		// c, i are pointers, so walking through their fields mutates
		// the tree directly; ranging by value is safe here.
		for _, c := range v.For {
			for j := range c.For.Var {
				walk1(&c.For.Var[j], stack, f)
			}
			walk1(&c.For.Expr, stack, f)
			for _, i := range c.Ifs {
				walk1(&i.Cond, stack, f)
			}
		}
	case *ConditionalExpr:
		walk1(&v.Then, stack, f)
		walk1(&v.Test, stack, f)
		walk1(&v.Else, stack, f)
	}
	// Pop the node pushed above before returning to the caller.
	*stack = (*stack)[:len(*stack)-1]
	return *v
}

View File

@ -2,8 +2,11 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = ["buffruneio.go"],
importpath = "github.com/pelletier/go-buffruneio",
srcs = [
"jsonparser.go",
"tables.go",
],
importpath = "github.com/bazelbuild/buildtools/tables",
visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,62 @@
/*
Copyright 2017 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package tables
import (
"encoding/json"
"io/ioutil"
)
// Definitions mirrors the overridable rule tables as a single JSON document.
type Definitions struct {
	IsLabelArg                      map[string]bool
	LabelBlacklist                  map[string]bool
	IsSortableListArg               map[string]bool
	SortableBlacklist               map[string]bool
	SortableWhitelist               map[string]bool
	NamePriority                    map[string]int
	StripLabelLeadingSlashes        bool
	ShortenAbsoluteLabelsToRelative bool
}

// ParseJSONDefinitions reads and parses JSON table definitions from file.
func ParseJSONDefinitions(file string) (Definitions, error) {
	var defs Definitions
	data, err := ioutil.ReadFile(file)
	if err != nil {
		return defs, err
	}
	if err := json.Unmarshal(data, &defs); err != nil {
		return defs, err
	}
	return defs, nil
}
// ParseAndUpdateJSONDefinitions reads definitions from file and merges or
// overrides the values in memory.
func ParseAndUpdateJSONDefinitions(file string, merge bool) error {
	d, err := ParseJSONDefinitions(file)
	if err != nil {
		return err
	}
	// Both update paths take the same argument list; pick one and apply.
	apply := OverrideTables
	if merge {
		apply = MergeTables
	}
	apply(d.IsLabelArg, d.LabelBlacklist, d.IsSortableListArg, d.SortableBlacklist, d.SortableWhitelist, d.NamePriority, d.StripLabelLeadingSlashes, d.ShortenAbsoluteLabelsToRelative)
	return nil
}

View File

@ -0,0 +1,237 @@
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Tables about what Buildifier can and cannot edit.
// Perhaps eventually this will be
// derived from the BUILD encyclopedia.
package tables
// IsLabelArg: a named argument to a rule call is considered to have a value
// that can be treated as a label or list of labels if the name
// is one of these names. There is a separate blacklist for
// rule-specific exceptions.
var IsLabelArg = map[string]bool{
"app_target": true,
"appdir": true,
"base_package": true,
"build_deps": true,
"cc_deps": true,
"ccdeps": true,
"common_deps": true,
"compile_deps": true,
"compiler": true,
"data": true,
"default_visibility": true,
"dep": true,
"deps": true,
"deps_java": true,
"dont_depend_on": true,
"env_deps": true,
"envscripts": true,
"exported_deps": true,
"exports": true,
"externs_list": true,
"files": true,
"globals": true,
"implementation": true,
"implements": true,
"includes": true,
"interface": true,
"jar": true,
"jars": true,
"javadeps": true,
"lib_deps": true,
"library": true,
"malloc": true,
"model": true,
"mods": true,
"module_deps": true,
"module_target": true,
"of": true,
"plugins": true,
"proto_deps": true,
"proto_target": true,
"protos": true,
"resource": true,
"resources": true,
"runtime_deps": true,
"scope": true,
"shared_deps": true,
"similar_deps": true,
"source_jar": true,
"src": true,
"srcs": true,
"stripped_targets": true,
"suites": true,
"swigdeps": true,
"target": true,
"target_devices": true,
"target_platforms": true,
"template": true,
"test": true,
"tests": true,
"tests_deps": true,
"tool": true,
"tools": true,
"visibility": true,
}
// LabelBlacklist is the list of call arguments that cannot be
// shortened, because they are not interpreted using the same
// rules as for other labels.
var LabelBlacklist = map[string]bool{
// Shortening this can cause visibility checks to fail.
"package_group.includes": true,
}
// IsSortableListArg: a named argument to a rule call is considered to be a sortable list
// if the name is one of these names. There is a separate blacklist for
// rule-specific exceptions.
var IsSortableListArg = map[string]bool{
"cc_deps": true,
"common_deps": true,
"compile_deps": true,
"configs": true,
"constraints": true,
"data": true,
"default_visibility": true,
"deps": true,
"deps_java": true,
"exported_deps": true,
"exports": true,
"filegroups": true,
"files": true,
"hdrs": true,
"imports": true,
"includes": true,
"inherits": true,
"javadeps": true,
"lib_deps": true,
"module_deps": true,
"out": true,
"outs": true,
"packages": true,
"plugin_modules": true,
"proto_deps": true,
"protos": true,
"pubs": true,
"resources": true,
"runtime_deps": true,
"shared_deps": true,
"similar_deps": true,
"srcs": true,
"swigdeps": true,
"swig_includes": true,
"tags": true,
"tests": true,
"tools": true,
"to_start_extensions": true,
"visibility": true,
}
// SortableBlacklist records specific rule arguments that must not be reordered.
var SortableBlacklist = map[string]bool{
"genrule.outs": true,
"genrule.srcs": true,
}
// SortableWhitelist records specific rule arguments that are guaranteed
// to be reorderable, because bazel re-sorts the list itself after reading the BUILD file.
var SortableWhitelist = map[string]bool{
"cc_inc_library.hdrs": true,
"cc_library.hdrs": true,
"java_library.srcs": true,
"java_library.resources": true,
"java_binary.srcs": true,
"java_binary.resources": true,
"java_test.srcs": true,
"java_test.resources": true,
"java_library.constraints": true,
"java_import.constraints": true,
}
// NamePriority maps an argument name to its sorting priority.
//
// NOTE(bazel-team): These are the old buildifier rules. It is likely that this table
// will change, perhaps swapping in a separate table for each call,
// derived from the order used in the Build Encyclopedia.
var NamePriority = map[string]int{
"name": -99,
"gwt_name": -98,
"package_name": -97,
"visible_node_name": -96, // for boq_initial_css_modules and boq_jswire_test_suite
"size": -95,
"timeout": -94,
"testonly": -93,
"src": -92,
"srcdir": -91,
"srcs": -90,
"out": -89,
"outs": -88,
"hdrs": -87,
"has_services": -86, // before api versions, for proto
"include": -85, // before exclude, for glob
"of": -84, // for check_dependencies
"baseline": -83, // for searchbox_library
// All others sort here, at 0.
"destdir": 1,
"exports": 2,
"runtime_deps": 3,
"deps": 4,
"implementation": 5,
"implements": 6,
"alwayslink": 7,
}
var StripLabelLeadingSlashes = false
var ShortenAbsoluteLabelsToRelative = false
// OverrideTables allows a user of the build package to override the special-case rules. The user-provided tables replace the built-in tables.
//
// Wholesale replacement: the previous tables are discarded, so callers
// must supply complete maps, not deltas (use MergeTables for deltas).
func OverrideTables(labelArg, blacklist, sortableListArg, sortBlacklist, sortWhitelist map[string]bool, namePriority map[string]int, stripLabelLeadingSlashes, shortenAbsoluteLabelsToRelative bool) {
	IsLabelArg = labelArg
	LabelBlacklist = blacklist
	IsSortableListArg = sortableListArg
	SortableBlacklist = sortBlacklist
	SortableWhitelist = sortWhitelist
	NamePriority = namePriority
	StripLabelLeadingSlashes = stripLabelLeadingSlashes
	ShortenAbsoluteLabelsToRelative = shortenAbsoluteLabelsToRelative
}
// MergeTables allows a user of the build package to override the special-case rules. The user-provided tables are merged into the built-in tables.
func MergeTables(labelArg, blacklist, sortableListArg, sortBlacklist, sortWhitelist map[string]bool, namePriority map[string]int, stripLabelLeadingSlashes, shortenAbsoluteLabelsToRelative bool) {
	// mergeBools copies every entry of src into dst, overwriting duplicates.
	mergeBools := func(dst, src map[string]bool) {
		for k, v := range src {
			dst[k] = v
		}
	}
	mergeBools(IsLabelArg, labelArg)
	mergeBools(LabelBlacklist, blacklist)
	mergeBools(IsSortableListArg, sortableListArg)
	mergeBools(SortableBlacklist, sortBlacklist)
	mergeBools(SortableWhitelist, sortWhitelist)
	for k, v := range namePriority {
		NamePriority[k] = v
	}
	// Boolean flags are sticky: once enabled (either previously or by
	// this call) they stay enabled.
	StripLabelLeadingSlashes = stripLabelLeadingSlashes || StripLabelLeadingSlashes
	ShortenAbsoluteLabelsToRelative = shortenAbsoluteLabelsToRelative || ShortenAbsoluteLabelsToRelative
}

View File

@ -1 +0,0 @@
*.test

View File

@ -1,7 +0,0 @@
language: go
sudo: false
go:
- 1.3.3
- 1.4.3
- 1.5.3
- tip

View File

@ -1,62 +0,0 @@
# buffruneio
[![Tests Status](https://travis-ci.org/pelletier/go-buffruneio.svg?branch=master)](https://travis-ci.org/pelletier/go-buffruneio)
[![GoDoc](https://godoc.org/github.com/pelletier/go-buffruneio?status.svg)](https://godoc.org/github.com/pelletier/go-buffruneio)
Buffruneio is a wrapper around bufio to provide buffered runes access with
unlimited unreads.
```go
import "github.com/pelletier/go-buffruneio"
```
## Examples
```go
import (
"fmt"
"github.com/pelletier/go-buffruneio"
"strings"
)
reader := buffruneio.NewReader(strings.NewReader("abcd"))
fmt.Println(reader.ReadRune()) // 'a'
fmt.Println(reader.ReadRune()) // 'b'
fmt.Println(reader.ReadRune()) // 'c'
reader.UnreadRune()
reader.UnreadRune()
fmt.Println(reader.ReadRune()) // 'b'
fmt.Println(reader.ReadRune()) // 'c'
```
## Documentation
The documentation and additional examples are available at
[godoc.org](http://godoc.org/github.com/pelletier/go-buffruneio).
## Contribute
Feel free to report bugs and patches using GitHub's pull requests system on
[pelletier/go-buffruneio](https://github.com/pelletier/go-buffruneio). Any feedback is
much appreciated!
## LICENSE
Copyright (c) 2016 Thomas Pelletier
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,110 +0,0 @@
// Package buffruneio is a wrapper around bufio to provide buffered runes access with unlimited unreads.
package buffruneio
import (
"bufio"
"container/list"
"errors"
"io"
)
// Rune to indicate end of file.
const (
EOF = -(iota + 1)
)
// ErrNoRuneToUnread is returned by UnreadRune() when the read index is already at the beginning of the buffer.
var ErrNoRuneToUnread = errors.New("no rune to unwind")
// Reader implements runes buffering for an io.Reader object.
type Reader struct {
buffer *list.List
current *list.Element
input *bufio.Reader
}
// NewReader returns a new Reader.
func NewReader(rd io.Reader) *Reader {
	r := new(Reader)
	r.buffer = list.New()
	r.input = bufio.NewReader(rd)
	return r
}
// feedBuffer reads one rune from the underlying reader and appends it to
// the buffer. On io.EOF the EOF sentinel rune is stored instead and no
// error is returned; any other read error is returned as-is.
func (rd *Reader) feedBuffer() error {
	r, _, err := rd.input.ReadRune()
	if err != nil {
		if err != io.EOF {
			return err
		}
		r = EOF
	}
	rd.buffer.PushBack(r)
	// First rune ever buffered: position the read cursor on it.
	if rd.current == nil {
		rd.current = rd.buffer.Back()
	}
	return nil
}
// ReadRune reads the next rune from buffer, or from the underlying reader if needed.
func (rd *Reader) ReadRune() (rune, error) {
	// Refill when the cursor has caught up with (or never entered)
	// the buffered data.
	if rd.current == rd.buffer.Back() || rd.current == nil {
		err := rd.feedBuffer()
		if err != nil {
			return EOF, err
		}
	}
	r := rd.current.Value
	rd.current = rd.current.Next()
	return r.(rune), nil
}
// UnreadRune pushes back the previously read rune in the buffer, extending it if needed.
func (rd *Reader) UnreadRune() error {
	switch {
	case rd.current == rd.buffer.Front():
		// Already at the beginning: nothing left to unread.
		return ErrNoRuneToUnread
	case rd.current == nil:
		// Cursor ran past the end of the buffer; step back onto it.
		rd.current = rd.buffer.Back()
	default:
		rd.current = rd.current.Prev()
	}
	return nil
}
// Forget removes runes stored before the current stream position index.
func (rd *Reader) Forget() {
	if rd.current == nil {
		rd.current = rd.buffer.Back()
	}
	// Repeatedly drop the element just before the cursor until the
	// cursor is the front of the buffer.
	for ; rd.current != rd.buffer.Front(); rd.buffer.Remove(rd.current.Prev()) {
	}
}
// Peek returns at most the next n runes, reading from the underlying source if
// needed. Does not move the current index. It includes EOF if reached.
func (rd *Reader) Peek(n int) []rune {
	res := make([]rune, 0, n)
	cursor := rd.current
	for i := 0; i < n; i++ {
		if cursor == nil {
			// Ran past the buffered data: pull one more rune in. A feed
			// error ends the peek with whatever was gathered so far.
			err := rd.feedBuffer()
			if err != nil {
				return res
			}
			cursor = rd.buffer.Back()
		}
		if cursor != nil {
			r := cursor.Value.(rune)
			res = append(res, r)
			if r == EOF {
				return res
			}
			cursor = cursor.Next()
		}
	}
	return res
}

View File

@ -1,18 +1,23 @@
sudo: false
language: go
go:
- 1.5.4
- 1.6.3
- 1.7
- 1.7.6
- 1.8.3
- 1.9
- tip
matrix:
allow_failures:
- go: tip
fast_finish: true
script:
- if [ -n "$(go fmt ./...)" ]; then exit 1; fi
- ./test.sh
- ./benchmark.sh $TRAVIS_BRANCH https://github.com/$TRAVIS_REPO_SLUG.git
before_install:
- go get github.com/axw/gocov/gocov
- go get github.com/mattn/goveralls
- if ! go get code.google.com/p/go.tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
branches:
only: [master]
after_success:
- $HOME/gopath/bin/goveralls -service=travis-ci
- $HOME/gopath/bin/goveralls -service=travis-ci -coverprofile=coverage.out -repotoken $COVERALLS_TOKEN

View File

@ -6,19 +6,16 @@ go_library(
"doc.go",
"keysparsing.go",
"lexer.go",
"match.go",
"marshal.go",
"parser.go",
"position.go",
"query.go",
"querylexer.go",
"queryparser.go",
"token.go",
"toml.go",
"tomltree_conversions.go",
"tomltree_create.go",
"tomltree_write.go",
],
importpath = "github.com/pelletier/go-toml",
visibility = ["//visibility:public"],
deps = ["//vendor/github.com/pelletier/go-buffruneio:go_default_library"],
)
filegroup(

View File

@ -1,6 +1,6 @@
The MIT License (MIT)
Copyright (c) 2013 - 2016 Thomas Pelletier, Eric Anderton
Copyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -6,73 +6,74 @@ This library supports TOML version
[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)
[![GoDoc](https://godoc.org/github.com/pelletier/go-toml?status.svg)](http://godoc.org/github.com/pelletier/go-toml)
[![license](https://img.shields.io/github/license/pelletier/go-toml.svg)](https://github.com/pelletier/go-toml/blob/master/LICENSE)
[![Build Status](https://travis-ci.org/pelletier/go-toml.svg?branch=master)](https://travis-ci.org/pelletier/go-toml)
[![Coverage Status](https://coveralls.io/repos/github/pelletier/go-toml/badge.svg?branch=master)](https://coveralls.io/github/pelletier/go-toml?branch=master)
[![Go Report Card](https://goreportcard.com/badge/github.com/pelletier/go-toml)](https://goreportcard.com/report/github.com/pelletier/go-toml)
## Features
Go-toml provides the following features for using data parsed from TOML documents:
* Load TOML documents from files and string data
* Easily navigate TOML structure using TomlTree
* Easily navigate TOML structure using Tree
* Marshaling and unmarshaling to and from data structures
* Line & column position data for all parsed elements
* Query support similar to JSON-Path
* [Query support similar to JSON-Path](query/)
* Syntax errors contain line and column numbers
Go-toml is designed to help cover use-cases not covered by reflection-based TOML parsing:
* Semantic evaluation of parsed TOML
* Informing a user of mistakes in the source document, after it has been parsed
* Programmatic handling of default values on a case-by-case basis
* Using a TOML document as a flexible data-store
## Import
import "github.com/pelletier/go-toml"
## Usage
### Example
Say you have a TOML file that looks like this:
```toml
[postgres]
user = "pelletier"
password = "mypassword"
```go
import "github.com/pelletier/go-toml"
```
Read the username and password like this:
## Usage example
Read a TOML document:
```go
import (
"fmt"
"github.com/pelletier/go-toml"
)
config, _ := toml.Load(`
[postgres]
user = "pelletier"
password = "mypassword"`)
// retrieve data directly
user := config.Get("postgres.user").(string)
config, err := toml.LoadFile("config.toml")
if err != nil {
fmt.Println("Error ", err.Error())
} else {
// retrieve data directly
user := config.Get("postgres.user").(string)
password := config.Get("postgres.password").(string)
// or using an intermediate object
postgresConfig := config.Get("postgres").(*toml.Tree)
password := postgresConfig.Get("password").(string)
```
// or using an intermediate object
configTree := config.Get("postgres").(*toml.TomlTree)
user = configTree.Get("user").(string)
password = configTree.Get("password").(string)
fmt.Println("User is ", user, ". Password is ", password)
Or use Unmarshal:
// show where elements are in the file
fmt.Println("User position: %v", configTree.GetPosition("user"))
fmt.Println("Password position: %v", configTree.GetPosition("password"))
```go
type Postgres struct {
User string
Password string
}
type Config struct {
Postgres Postgres
}
// use a query to gather elements without walking the tree
results, _ := config.Query("$..[user,password]")
for ii, item := range results.Values() {
fmt.Println("Query result %d: %v", ii, item)
}
doc := []byte(`
[postgres]
user = "pelletier"
password = "mypassword"`)
config := Config{}
toml.Unmarshal(doc, &config)
fmt.Println("user=", config.Postgres.User)
```
Or use a query:
```go
// use a query to gather elements without walking the tree
q, _ := query.Compile("$..[user,password]")
results := q.Execute(config)
for ii, item := range results.Values() {
fmt.Println("Query result %d: %v", ii, item)
}
```
@ -94,7 +95,7 @@ Go-toml provides two handy command line tools:
* `tomljson`: Reads a TOML file and outputs its JSON representation.
```
go install github.com/pelletier/go-toml/cmd/tomjson
go install github.com/pelletier/go-toml/cmd/tomljson
tomljson --help
```

164
vendor/github.com/pelletier/go-toml/benchmark.json generated vendored Normal file
View File

@ -0,0 +1,164 @@
{
"array": {
"key1": [
1,
2,
3
],
"key2": [
"red",
"yellow",
"green"
],
"key3": [
[
1,
2
],
[
3,
4,
5
]
],
"key4": [
[
1,
2
],
[
"a",
"b",
"c"
]
],
"key5": [
1,
2,
3
],
"key6": [
1,
2
]
},
"boolean": {
"False": false,
"True": true
},
"datetime": {
"key1": "1979-05-27T07:32:00Z",
"key2": "1979-05-27T00:32:00-07:00",
"key3": "1979-05-27T00:32:00.999999-07:00"
},
"float": {
"both": {
"key": 6.626e-34
},
"exponent": {
"key1": 5e+22,
"key2": 1000000,
"key3": -0.02
},
"fractional": {
"key1": 1,
"key2": 3.1415,
"key3": -0.01
},
"underscores": {
"key1": 9224617.445991227,
"key2": 1e+100
}
},
"fruit": [{
"name": "apple",
"physical": {
"color": "red",
"shape": "round"
},
"variety": [{
"name": "red delicious"
},
{
"name": "granny smith"
}
]
},
{
"name": "banana",
"variety": [{
"name": "plantain"
}]
}
],
"integer": {
"key1": 99,
"key2": 42,
"key3": 0,
"key4": -17,
"underscores": {
"key1": 1000,
"key2": 5349221,
"key3": 12345
}
},
"products": [{
"name": "Hammer",
"sku": 738594937
},
{},
{
"color": "gray",
"name": "Nail",
"sku": 284758393
}
],
"string": {
"basic": {
"basic": "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."
},
"literal": {
"multiline": {
"lines": "The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n",
"regex2": "I [dw]on't need \\d{2} apples"
},
"quoted": "Tom \"Dubs\" Preston-Werner",
"regex": "\u003c\\i\\c*\\s*\u003e",
"winpath": "C:\\Users\\nodejs\\templates",
"winpath2": "\\\\ServerX\\admin$\\system32\\"
},
"multiline": {
"continued": {
"key1": "The quick brown fox jumps over the lazy dog.",
"key2": "The quick brown fox jumps over the lazy dog.",
"key3": "The quick brown fox jumps over the lazy dog."
},
"key1": "One\nTwo",
"key2": "One\nTwo",
"key3": "One\nTwo"
}
},
"table": {
"inline": {
"name": {
"first": "Tom",
"last": "Preston-Werner"
},
"point": {
"x": 1,
"y": 2
}
},
"key": "value",
"subtable": {
"key": "another value"
}
},
"x": {
"y": {
"z": {
"w": {}
}
}
}
}

32
vendor/github.com/pelletier/go-toml/benchmark.sh generated vendored Executable file
View File

@ -0,0 +1,32 @@
#!/bin/bash
# benchmark.sh — compare go-toml benchmark results between a reference git
# revision and the local working tree, summarizing the delta with benchstat.
#
# Usage: ./benchmark.sh [reference_ref] [reference_git]
#   reference_ref  git ref to benchmark against (default: master)
#   reference_git  repository to clone that ref from (default: .)

# Abort on the first failing command.
set -e

reference_ref=${1:-master}
reference_git=${2:-.}

# Install benchstat if it is not already on the PATH.
if ! `hash benchstat 2>/dev/null`; then
echo "Installing benchstat"
go get golang.org/x/perf/cmd/benchstat
go install golang.org/x/perf/cmd/benchstat
fi

# Scratch checkout of the reference revision, plus output file locations.
tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX`
ref_tempdir="${tempdir}/ref"
ref_benchmark="${ref_tempdir}/benchmark-`echo -n ${reference_ref}|tr -s '/' '-'`.txt"
local_benchmark="`pwd`/benchmark-local.txt"

# Run the benchmarks on the reference revision.
echo "=== ${reference_ref} (${ref_tempdir})"
git clone ${reference_git} ${ref_tempdir} >/dev/null 2>/dev/null
pushd ${ref_tempdir} >/dev/null
git checkout ${reference_ref} >/dev/null 2>/dev/null
go test -bench=. -benchmem | tee ${ref_benchmark}
popd >/dev/null
echo ""

# Run the same benchmarks on the local tree.
echo "=== local"
go test -bench=. -benchmem | tee ${local_benchmark}
echo ""

# Show the statistical difference between the two runs.
echo "=== diff"
benchstat -delta-test=none ${ref_benchmark} ${local_benchmark}

244
vendor/github.com/pelletier/go-toml/benchmark.toml generated vendored Normal file
View File

@ -0,0 +1,244 @@
################################################################################
## Comment
# Speak your mind with the hash symbol. They go from the symbol to the end of
# the line.
################################################################################
## Table
# Tables (also known as hash tables or dictionaries) are collections of
# key/value pairs. They appear in square brackets on a line by themselves.
[table]
key = "value" # Yeah, you can do this.
# Nested tables are denoted by table names with dots in them. Name your tables
# whatever crap you please, just don't use #, ., [ or ].
[table.subtable]
key = "another value"
# You don't need to specify all the super-tables if you don't want to. TOML
# knows how to do it for you.
# [x] you
# [x.y] don't
# [x.y.z] need these
[x.y.z.w] # for this to work
################################################################################
## Inline Table
# Inline tables provide a more compact syntax for expressing tables. They are
# especially useful for grouped data that can otherwise quickly become verbose.
# Inline tables are enclosed in curly braces `{` and `}`. No newlines are
# allowed between the curly braces unless they are valid within a value.
[table.inline]
name = { first = "Tom", last = "Preston-Werner" }
point = { x = 1, y = 2 }
################################################################################
## String
# There are four ways to express strings: basic, multi-line basic, literal, and
# multi-line literal. All strings must contain only valid UTF-8 characters.
[string.basic]
basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF."
[string.multiline]
# The following strings are byte-for-byte equivalent:
key1 = "One\nTwo"
key2 = """One\nTwo"""
key3 = """
One
Two"""
[string.multiline.continued]
# The following strings are byte-for-byte equivalent:
key1 = "The quick brown fox jumps over the lazy dog."
key2 = """
The quick brown \
fox jumps over \
the lazy dog."""
key3 = """\
The quick brown \
fox jumps over \
the lazy dog.\
"""
[string.literal]
# What you see is what you get.
winpath = 'C:\Users\nodejs\templates'
winpath2 = '\\ServerX\admin$\system32\'
quoted = 'Tom "Dubs" Preston-Werner'
regex = '<\i\c*\s*>'
[string.literal.multiline]
regex2 = '''I [dw]on't need \d{2} apples'''
lines = '''
The first newline is
trimmed in raw strings.
All other whitespace
is preserved.
'''
################################################################################
## Integer
# Integers are whole numbers. Positive numbers may be prefixed with a plus sign.
# Negative numbers are prefixed with a minus sign.
[integer]
key1 = +99
key2 = 42
key3 = 0
key4 = -17
[integer.underscores]
# For large numbers, you may use underscores to enhance readability. Each
# underscore must be surrounded by at least one digit.
key1 = 1_000
key2 = 5_349_221
key3 = 1_2_3_4_5 # valid but inadvisable
################################################################################
## Float
# A float consists of an integer part (which may be prefixed with a plus or
# minus sign) followed by a fractional part and/or an exponent part.
[float.fractional]
key1 = +1.0
key2 = 3.1415
key3 = -0.01
[float.exponent]
key1 = 5e+22
key2 = 1e6
key3 = -2E-2
[float.both]
key = 6.626e-34
[float.underscores]
key1 = 9_224_617.445_991_228_313
key2 = 1e1_00
################################################################################
## Boolean
# Booleans are just the tokens you're used to. Always lowercase.
[boolean]
True = true
False = false
################################################################################
## Datetime
# Datetimes are RFC 3339 dates.
[datetime]
key1 = 1979-05-27T07:32:00Z
key2 = 1979-05-27T00:32:00-07:00
key3 = 1979-05-27T00:32:00.999999-07:00
################################################################################
## Array
# Arrays are square brackets with other primitives inside. Whitespace is
# ignored. Elements are separated by commas. Data types may not be mixed.
[array]
key1 = [ 1, 2, 3 ]
key2 = [ "red", "yellow", "green" ]
key3 = [ [ 1, 2 ], [3, 4, 5] ]
#key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok
# Arrays can also be multiline. So in addition to ignoring whitespace, arrays
# also ignore newlines between the brackets. Terminating commas are ok before
# the closing bracket.
key5 = [
1, 2, 3
]
key6 = [
1,
2, # this is ok
]
################################################################################
## Array of Tables
# These can be expressed by using a table name in double brackets. Each table
# with the same double bracketed name will be an element in the array. The
# tables are inserted in the order encountered.
[[products]]
name = "Hammer"
sku = 738594937
[[products]]
[[products]]
name = "Nail"
sku = 284758393
color = "gray"
# You can create nested arrays of tables as well.
[[fruit]]
name = "apple"
[fruit.physical]
color = "red"
shape = "round"
[[fruit.variety]]
name = "red delicious"
[[fruit.variety]]
name = "granny smith"
[[fruit]]
name = "banana"
[[fruit.variety]]
name = "plantain"

121
vendor/github.com/pelletier/go-toml/benchmark.yml generated vendored Normal file
View File

@ -0,0 +1,121 @@
---
array:
key1:
- 1
- 2
- 3
key2:
- red
- yellow
- green
key3:
- - 1
- 2
- - 3
- 4
- 5
key4:
- - 1
- 2
- - a
- b
- c
key5:
- 1
- 2
- 3
key6:
- 1
- 2
boolean:
'False': false
'True': true
datetime:
key1: '1979-05-27T07:32:00Z'
key2: '1979-05-27T00:32:00-07:00'
key3: '1979-05-27T00:32:00.999999-07:00'
float:
both:
key: 6.626e-34
exponent:
key1: 5.0e+22
key2: 1000000
key3: -0.02
fractional:
key1: 1
key2: 3.1415
key3: -0.01
underscores:
key1: 9224617.445991227
key2: 1.0e+100
fruit:
- name: apple
physical:
color: red
shape: round
variety:
- name: red delicious
- name: granny smith
- name: banana
variety:
- name: plantain
integer:
key1: 99
key2: 42
key3: 0
key4: -17
underscores:
key1: 1000
key2: 5349221
key3: 12345
products:
- name: Hammer
sku: 738594937
- {}
- color: gray
name: Nail
sku: 284758393
string:
basic:
basic: "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."
literal:
multiline:
lines: |
The first newline is
trimmed in raw strings.
All other whitespace
is preserved.
regex2: I [dw]on't need \d{2} apples
quoted: Tom "Dubs" Preston-Werner
regex: "<\\i\\c*\\s*>"
winpath: C:\Users\nodejs\templates
winpath2: "\\\\ServerX\\admin$\\system32\\"
multiline:
continued:
key1: The quick brown fox jumps over the lazy dog.
key2: The quick brown fox jumps over the lazy dog.
key3: The quick brown fox jumps over the lazy dog.
key1: |-
One
Two
key2: |-
One
Two
key3: |-
One
Two
table:
inline:
name:
first: Tom
last: Preston-Werner
point:
x: 1
y: 2
key: value
subtable:
key: another value
x:
y:
z:
w: {}

View File

@ -1,6 +0,0 @@
#!/bin/bash
# fail out of the script if anything here fails
set -e
# clear out stuff generated by test.sh
rm -rf src test_program_bin toml-test

View File

@ -1,250 +1,23 @@
// Package toml is a TOML markup language parser.
// Package toml is a TOML parser and manipulation library.
//
// This version supports the specification as described in
// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md
//
// TOML Parsing
// Marshaling
//
// TOML data may be parsed in two ways: by file, or by string.
// Go-toml can marshal and unmarshal TOML documents from and to data
// structures.
//
// // load TOML data by filename
// tree, err := toml.LoadFile("filename.toml")
// TOML document as a tree
//
// // load TOML data stored in a string
// tree, err := toml.Load(stringContainingTomlData)
// Go-toml can operate on a TOML document as a tree. Use one of the Load*
// functions to parse TOML data and obtain a Tree instance, then one of its
// methods to manipulate the tree.
//
// Either way, the result is a TomlTree object that can be used to navigate the
// structure and data within the original document.
// JSONPath-like queries
//
//
// Getting data from the TomlTree
//
// After parsing TOML data with Load() or LoadFile(), use the Has() and Get()
// methods on the returned TomlTree, to find your way through the document data.
//
// if tree.Has('foo') {
// fmt.Prinln("foo is: %v", tree.Get('foo'))
// }
//
// Working with Paths
//
// Go-toml has support for basic dot-separated key paths on the Has(), Get(), Set()
// and GetDefault() methods. These are the same kind of key paths used within the
// TOML specification for struct names.
//
// // looks for a key named 'baz', within struct 'bar', within struct 'foo'
// tree.Has("foo.bar.baz")
//
// // returns the key at this path, if it is there
// tree.Get("foo.bar.baz")
//
// TOML allows keys to contain '.', which can cause this syntax to be problematic
// for some documents. In such cases, use the GetPath(), HasPath(), and SetPath(),
// methods to explicitly define the path. This form is also faster, since
// it avoids having to parse the passed key for '.' delimiters.
//
// // looks for a key named 'baz', within struct 'bar', within struct 'foo'
// tree.HasPath(string{}{"foo","bar","baz"})
//
// // returns the key at this path, if it is there
// tree.GetPath(string{}{"foo","bar","baz"})
//
// Note that this is distinct from the heavyweight query syntax supported by
// TomlTree.Query() and the Query() struct (see below).
//
// Position Support
//
// Each element within the TomlTree is stored with position metadata, which is
// invaluable for providing semantic feedback to a user. This helps in
// situations where the TOML file parses correctly, but contains data that is
// not correct for the application. In such cases, an error message can be
// generated that indicates the problem line and column number in the source
// TOML document.
//
// // load TOML data
// tree, _ := toml.Load("filename.toml")
//
// // get an entry and report an error if it's the wrong type
// element := tree.Get("foo")
// if value, ok := element.(int64); !ok {
// return fmt.Errorf("%v: Element 'foo' must be an integer", tree.GetPosition("foo"))
// }
//
// // report an error if an expected element is missing
// if !tree.Has("bar") {
// return fmt.Errorf("%v: Expected 'bar' element", tree.GetPosition(""))
// }
//
// Query Support
//
// The TOML query path implementation is based loosely on the JSONPath specification:
// http://goessner.net/articles/JsonPath/
//
// The idea behind a query path is to allow quick access to any element, or set
// of elements within TOML document, with a single expression.
//
// result, err := tree.Query("$.foo.bar.baz")
//
// This is roughly equivalent to:
//
// next := tree.Get("foo")
// if next != nil {
// next = next.Get("bar")
// if next != nil {
// next = next.Get("baz")
// }
// }
// result := next
//
// err is nil if any parsing exception occurs.
//
// If no node in the tree matches the query, result will simply contain an empty list of
// items.
//
// As illustrated above, the query path is much more efficient, especially since
// the structure of the TOML file can vary. Rather than making assumptions about
// a document's structure, a query allows the programmer to make structured
// requests into the document, and get zero or more values as a result.
//
// The syntax of a query begins with a root token, followed by any number
// sub-expressions:
//
// $
// Root of the TOML tree. This must always come first.
// .name
// Selects child of this node, where 'name' is a TOML key
// name.
// ['name']
// Selects child of this node, where 'name' is a string
// containing a TOML key name.
// [index]
//    Selects child array element at 'index'.
// ..expr
//    Recursively selects all children, filtered by a union,
// index, or slice expression.
// ..*
// Recursive selection of all nodes at this point in the
// tree.
// .*
// Selects all children of the current node.
// [expr,expr]
// Union operator - a logical 'or' grouping of two or more
// sub-expressions: index, key name, or filter.
// [start:end:step]
// Slice operator - selects array elements from start to
// end-1, at the given step. All three arguments are
// optional.
// [?(filter)]
// Named filter expression - the function 'filter' is
// used to filter children at this node.
//
// Query Indexes And Slices
//
// Index expressions perform no bounds checking, and will contribute no
// values to the result set if the provided index or index range is invalid.
// Negative indexes represent values from the end of the array, counting backwards.
//
// // select the last index of the array named 'foo'
// tree.Query("$.foo[-1]")
//
// Slice expressions are supported, by using ':' to separate a start/end index pair.
//
// // select up to the first five elements in the array
// tree.Query("$.foo[0:5]")
//
// Slice expressions also allow negative indexes for the start and stop
// arguments.
//
// // select all array elements.
// tree.Query("$.foo[0:-1]")
//
// Slice expressions may have an optional stride/step parameter:
//
// // select every other element
// tree.Query("$.foo[0:-1:2]")
//
// Slice start and end parameters are also optional:
//
// // these are all equivalent and select all the values in the array
// tree.Query("$.foo[:]")
// tree.Query("$.foo[0:]")
// tree.Query("$.foo[:-1]")
// tree.Query("$.foo[0:-1:]")
// tree.Query("$.foo[::1]")
// tree.Query("$.foo[0::1]")
// tree.Query("$.foo[:-1:1]")
// tree.Query("$.foo[0:-1:1]")
//
// Query Filters
//
// Query filters are used within a Union [,] or single Filter [] expression.
// A filter only allows nodes that qualify through to the next expression,
// and/or into the result set.
//
// // returns children of foo that are permitted by the 'bar' filter.
// tree.Query("$.foo[?(bar)]")
//
// There are several filters provided with the library:
//
// tree
// Allows nodes of type TomlTree.
// int
// Allows nodes of type int64.
// float
// Allows nodes of type float64.
// string
// Allows nodes of type string.
// time
// Allows nodes of type time.Time.
// bool
// Allows nodes of type bool.
//
// Query Results
//
// An executed query returns a QueryResult object. This contains the nodes
// in the TOML tree that qualify the query expression. Position information
// is also available for each value in the set.
//
// // display the results of a query
// results := tree.Query("$.foo.bar.baz")
// for idx, value := results.Values() {
// fmt.Println("%v: %v", results.Positions()[idx], value)
// }
//
// Compiled Queries
//
// Queries may be executed directly on a TomlTree object, or compiled ahead
// of time and executed discretely. The former is more convenient, but has the
// penalty of having to recompile the query expression each time.
//
// // basic query
// results := tree.Query("$.foo.bar.baz")
//
// // compiled query
// query := toml.CompileQuery("$.foo.bar.baz")
// results := query.Execute(tree)
//
// // run the compiled query again on a different tree
// moreResults := query.Execute(anotherTree)
//
// User Defined Query Filters
//
// Filter expressions may also be user defined by using the SetFilter()
// function on the Query object. The function must return true/false, which
// signifies if the passed node is kept or discarded, respectively.
//
// // create a query that references a user-defined filter
// query, _ := CompileQuery("$[?(bazOnly)]")
//
// // define the filter, and assign it to the query
// query.SetFilter("bazOnly", func(node interface{}) bool{
// if tree, ok := node.(*TomlTree); ok {
// return tree.Has("baz")
// }
// return false // reject all other node types
// })
//
// // run the query
// query.Execute(tree)
// The package github.com/pelletier/go-toml/query implements a system
// similar to JSONPath to quickly retrieve elements of a TOML document using a
// single expression. See the package documentation for more information.
//
package toml

View File

@ -4,6 +4,7 @@ package toml
import (
"bytes"
"errors"
"fmt"
"unicode"
)
@ -12,6 +13,7 @@ func parseKey(key string) ([]string, error) {
groups := []string{}
var buffer bytes.Buffer
inQuotes := false
wasInQuotes := false
escapeNext := false
ignoreSpace := true
expectDot := false
@ -33,16 +35,27 @@ func parseKey(key string) ([]string, error) {
escapeNext = true
continue
case '"':
if inQuotes {
groups = append(groups, buffer.String())
buffer.Reset()
wasInQuotes = true
}
inQuotes = !inQuotes
expectDot = false
case '.':
if inQuotes {
buffer.WriteRune(char)
} else {
groups = append(groups, buffer.String())
buffer.Reset()
if !wasInQuotes {
if buffer.Len() == 0 {
return nil, errors.New("empty table key")
}
groups = append(groups, buffer.String())
buffer.Reset()
}
ignoreSpace = true
expectDot = false
wasInQuotes = false
}
case ' ':
if inQuotes {
@ -55,23 +68,23 @@ func parseKey(key string) ([]string, error) {
return nil, fmt.Errorf("invalid bare character: %c", char)
}
if !inQuotes && expectDot {
return nil, fmt.Errorf("what?")
return nil, errors.New("what?")
}
buffer.WriteRune(char)
expectDot = false
}
}
if inQuotes {
return nil, fmt.Errorf("mismatched quotes")
return nil, errors.New("mismatched quotes")
}
if escapeNext {
return nil, fmt.Errorf("unfinished escape sequence")
return nil, errors.New("unfinished escape sequence")
}
if buffer.Len() > 0 {
groups = append(groups, buffer.String())
}
if len(groups) == 0 {
return nil, fmt.Errorf("empty key")
return nil, errors.New("empty key")
}
return groups, nil
}

View File

@ -1,19 +1,17 @@
// TOML lexer.
//
// Written using the principles developped by Rob Pike in
// Written using the principles developed by Rob Pike in
// http://www.youtube.com/watch?v=HxaD_trXwRE
package toml
import (
"bytes"
"errors"
"fmt"
"io"
"regexp"
"strconv"
"strings"
"github.com/pelletier/go-buffruneio"
)
var dateRegexp *regexp.Regexp
@ -23,29 +21,29 @@ type tomlLexStateFn func() tomlLexStateFn
// Define lexer
type tomlLexer struct {
input *buffruneio.Reader // Textual source
buffer []rune // Runes composing the current token
tokens chan token
depth int
line int
col int
endbufferLine int
endbufferCol int
inputIdx int
input []rune // Textual source
currentTokenStart int
currentTokenStop int
tokens []token
depth int
line int
col int
endbufferLine int
endbufferCol int
}
// Basic read operations on input
func (l *tomlLexer) read() rune {
r, err := l.input.ReadRune()
if err != nil {
panic(err)
}
r := l.peek()
if r == '\n' {
l.endbufferLine++
l.endbufferCol = 1
} else {
l.endbufferCol++
}
l.inputIdx++
return r
}
@ -53,13 +51,13 @@ func (l *tomlLexer) next() rune {
r := l.read()
if r != eof {
l.buffer = append(l.buffer, r)
l.currentTokenStop++
}
return r
}
func (l *tomlLexer) ignore() {
l.buffer = make([]rune, 0)
l.currentTokenStart = l.currentTokenStop
l.line = l.endbufferLine
l.col = l.endbufferCol
}
@ -76,49 +74,46 @@ func (l *tomlLexer) fastForward(n int) {
}
func (l *tomlLexer) emitWithValue(t tokenType, value string) {
l.tokens <- token{
l.tokens = append(l.tokens, token{
Position: Position{l.line, l.col},
typ: t,
val: value,
}
})
l.ignore()
}
func (l *tomlLexer) emit(t tokenType) {
l.emitWithValue(t, string(l.buffer))
l.emitWithValue(t, string(l.input[l.currentTokenStart:l.currentTokenStop]))
}
func (l *tomlLexer) peek() rune {
r, err := l.input.ReadRune()
if err != nil {
panic(err)
if l.inputIdx >= len(l.input) {
return eof
}
l.input.UnreadRune()
return r
return l.input[l.inputIdx]
}
func (l *tomlLexer) peekString(size int) string {
maxIdx := len(l.input)
upperIdx := l.inputIdx + size // FIXME: potential overflow
if upperIdx > maxIdx {
upperIdx = maxIdx
}
return string(l.input[l.inputIdx:upperIdx])
}
func (l *tomlLexer) follow(next string) bool {
for _, expectedRune := range next {
r, err := l.input.ReadRune()
defer l.input.UnreadRune()
if err != nil {
panic(err)
}
if expectedRune != r {
return false
}
}
return true
return next == l.peekString(len(next))
}
// Error management
func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
l.tokens <- token{
l.tokens = append(l.tokens, token{
Position: Position{l.line, l.col},
typ: tokenError,
val: fmt.Sprintf(format, args...),
}
})
return nil
}
@ -129,9 +124,9 @@ func (l *tomlLexer) lexVoid() tomlLexStateFn {
next := l.peek()
switch next {
case '[':
return l.lexKeyGroup
return l.lexTableKey
case '#':
return l.lexComment
return l.lexComment(l.lexVoid)
case '=':
return l.lexEqual
case '\r':
@ -182,7 +177,7 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
case '}':
return l.lexRightCurlyBrace
case '#':
return l.lexComment
return l.lexComment(l.lexRvalue)
case '"':
return l.lexString
case '\'':
@ -219,7 +214,7 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
break
}
possibleDate := string(l.input.Peek(35))
possibleDate := l.peekString(35)
dateMatch := dateRegexp.FindString(possibleDate)
if dateMatch != "" {
l.fastForward(len(dateMatch))
@ -309,15 +304,17 @@ func (l *tomlLexer) lexKey() tomlLexStateFn {
return l.lexVoid
}
func (l *tomlLexer) lexComment() tomlLexStateFn {
for next := l.peek(); next != '\n' && next != eof; next = l.peek() {
if next == '\r' && l.follow("\r\n") {
break
func (l *tomlLexer) lexComment(previousState tomlLexStateFn) tomlLexStateFn {
return func() tomlLexStateFn {
for next := l.peek(); next != '\n' && next != eof; next = l.peek() {
if next == '\r' && l.follow("\r\n") {
break
}
l.next()
}
l.next()
l.ignore()
return previousState
}
l.ignore()
return l.lexVoid
}
func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
@ -516,25 +513,25 @@ func (l *tomlLexer) lexString() tomlLexStateFn {
return l.lexRvalue
}
func (l *tomlLexer) lexKeyGroup() tomlLexStateFn {
func (l *tomlLexer) lexTableKey() tomlLexStateFn {
l.next()
if l.peek() == '[' {
// token '[[' signifies an array of anonymous key groups
// token '[[' signifies an array of tables
l.next()
l.emit(tokenDoubleLeftBracket)
return l.lexInsideKeyGroupArray
return l.lexInsideTableArrayKey
}
// vanilla key group
// vanilla table key
l.emit(tokenLeftBracket)
return l.lexInsideKeyGroup
return l.lexInsideTableKey
}
func (l *tomlLexer) lexInsideKeyGroupArray() tomlLexStateFn {
func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn {
for r := l.peek(); r != eof; r = l.peek() {
switch r {
case ']':
if len(l.buffer) > 0 {
if l.currentTokenStop > l.currentTokenStart {
l.emit(tokenKeyGroupArray)
}
l.next()
@ -545,31 +542,31 @@ func (l *tomlLexer) lexInsideKeyGroupArray() tomlLexStateFn {
l.emit(tokenDoubleRightBracket)
return l.lexVoid
case '[':
return l.errorf("group name cannot contain ']'")
return l.errorf("table array key cannot contain ']'")
default:
l.next()
}
}
return l.errorf("unclosed key group array")
return l.errorf("unclosed table array key")
}
func (l *tomlLexer) lexInsideKeyGroup() tomlLexStateFn {
func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn {
for r := l.peek(); r != eof; r = l.peek() {
switch r {
case ']':
if len(l.buffer) > 0 {
if l.currentTokenStop > l.currentTokenStart {
l.emit(tokenKeyGroup)
}
l.next()
l.emit(tokenRightBracket)
return l.lexVoid
case '[':
return l.errorf("group name cannot contain ']'")
return l.errorf("table key cannot contain ']'")
default:
l.next()
}
}
return l.errorf("unclosed key group")
return l.errorf("unclosed table key")
}
func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
@ -632,7 +629,6 @@ func (l *tomlLexer) run() {
for state := l.lexVoid; state != nil; {
state = state()
}
close(l.tokens)
}
func init() {
@ -640,16 +636,16 @@ func init() {
}
// Entry point
func lexToml(input io.Reader) chan token {
bufferedInput := buffruneio.NewReader(input)
func lexToml(inputBytes []byte) []token {
runes := bytes.Runes(inputBytes)
l := &tomlLexer{
input: bufferedInput,
tokens: make(chan token),
input: runes,
tokens: make([]token, 0, 256),
line: 1,
col: 1,
endbufferLine: 1,
endbufferCol: 1,
}
go l.run()
l.run()
return l.tokens
}

489
vendor/github.com/pelletier/go-toml/marshal.go generated vendored Normal file
View File

@ -0,0 +1,489 @@
package toml
import (
"bytes"
"errors"
"fmt"
"reflect"
"strings"
"time"
)
// tomlOpts holds the per-field marshaling options used when converting a
// struct to a TOML tree (see valueToTree).
type tomlOpts struct {
	name      string // key name written into the output tree
	include   bool   // false when the field should be skipped entirely
	omitempty bool   // skip the field when its value is the zero value
}

// timeType is cached so isPrimitive can recognize time.Time structs.
var timeType = reflect.TypeOf(time.Time{})

// marshalerType is the reflect.Type of the Marshaler interface, used by
// isCustomMarshaler to detect types that encode themselves.
var marshalerType = reflect.TypeOf(new(Marshaler)).Elem()
// isPrimitive reports whether mtype maps to a Tree primitive value:
// bool, any integer or float kind, string, time.Time, or a type that
// implements Marshaler. Pointers are followed to their element type.
func isPrimitive(mtype reflect.Type) bool {
	kind := mtype.Kind()
	if kind == reflect.Ptr {
		return isPrimitive(mtype.Elem())
	}
	switch kind {
	case reflect.Bool,
		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
		reflect.Float32, reflect.Float64,
		reflect.String:
		return true
	case reflect.Struct:
		// Structs are only primitive when they are a time.Time or
		// marshal themselves via MarshalTOML.
		return mtype == timeType || isCustomMarshaler(mtype)
	}
	return false
}
// isTreeSlice reports whether mtype maps to a slice of Tree values, i.e.
// a slice whose elements are not primitives or nested slices.
func isTreeSlice(mtype reflect.Type) bool {
	if mtype.Kind() != reflect.Slice {
		return false
	}
	return !isOtherSlice(mtype)
}
// isOtherSlice reports whether mtype maps to a non-Tree slice: a slice of
// primitives or a slice of such slices. Pointers are followed to their
// element type.
func isOtherSlice(mtype reflect.Type) bool {
	kind := mtype.Kind()
	if kind == reflect.Ptr {
		return isOtherSlice(mtype.Elem())
	}
	if kind != reflect.Slice {
		return false
	}
	elem := mtype.Elem()
	return isPrimitive(elem) || isOtherSlice(elem)
}
// isTree reports whether mtype maps to a Tree: any map, or any struct that
// is not itself a primitive (time.Time or a custom Marshaler).
func isTree(mtype reflect.Type) bool {
	kind := mtype.Kind()
	if kind == reflect.Map {
		return true
	}
	return kind == reflect.Struct && !isPrimitive(mtype)
}
// isCustomMarshaler reports whether mtype implements the Marshaler
// interface and therefore encodes itself.
func isCustomMarshaler(mtype reflect.Type) bool {
	return mtype.Implements(marshalerType)
}

// callCustomMarshaler invokes mval's own MarshalTOML method. The caller
// must have verified isCustomMarshaler first: the type assertion panics
// otherwise.
func callCustomMarshaler(mval reflect.Value) ([]byte, error) {
	return mval.Interface().(Marshaler).MarshalTOML()
}
// Marshaler is the interface implemented by types that
// can marshal themselves into valid TOML.
// Marshal checks for it before falling back to reflection-based encoding.
type Marshaler interface {
	MarshalTOML() ([]byte, error)
}
/*
Marshal returns the TOML encoding of v. Behavior is similar to the Go json
encoder, except that there is no concept of a Marshaler interface or MarshalTOML
function for sub-structs, and currently only definite types can be marshaled
(i.e. no `interface{}`).

Note that pointers are automatically assigned the "omitempty" option, as TOML
explicitly does not handle null values (saying instead the label should be
dropped).

Tree structural types and corresponding marshal types:

  *Tree                            (*)struct, (*)map[string]interface{}
  []*Tree                          (*)[](*)struct, (*)[](*)map[string]interface{}
  []interface{} (as interface{})   (*)[]primitive, (*)[]([]interface{})
  interface{}                      (*)primitive

Tree primitive types and corresponding marshal types:

  uint64     uint, uint8-uint64, pointers to same
  int64      int, int8-int64, pointers to same
  float64    float32, float64, pointers to same
  string     string, pointers to same
  bool       bool, pointers to same
  time.Time  time.Time{}, pointers to same
*/
func Marshal(v interface{}) ([]byte, error) {
	mtype := reflect.TypeOf(v)
	if mtype.Kind() != reflect.Struct {
		// A TOML document is a table at the top level, so only structs
		// can be marshaled directly. Error string is lowercase per Go
		// convention (staticcheck ST1005); return nil on failure rather
		// than a partially-populated empty slice.
		return nil, errors.New("only a struct can be marshaled to TOML")
	}
	sval := reflect.ValueOf(v)
	// Types that encode themselves take precedence over reflection.
	if isCustomMarshaler(mtype) {
		return callCustomMarshaler(sval)
	}
	t, err := valueToTree(mtype, sval)
	if err != nil {
		return nil, err
	}
	s, err := t.ToTomlString()
	return []byte(s), err
}
// valueToTree converts a struct or map marshal value into a *Tree,
// dereferencing pointers first. Struct fields are filtered through their
// tomlOpts (include/omitempty); map entries are keyed by their string key.
func valueToTree(mtype reflect.Type, mval reflect.Value) (*Tree, error) {
	if mtype.Kind() == reflect.Ptr {
		return valueToTree(mtype.Elem(), mval.Elem())
	}
	tree := newTree()
	switch mtype.Kind() {
	case reflect.Struct:
		for i := 0; i < mtype.NumField(); i++ {
			ftype := mtype.Field(i)
			opts := tomlOptions(ftype)
			if !opts.include {
				continue // field explicitly excluded
			}
			fval := mval.Field(i)
			if opts.omitempty && isZero(fval) {
				continue // drop zero values when omitempty is set
			}
			val, err := valueToToml(ftype.Type, fval)
			if err != nil {
				return nil, err
			}
			tree.Set(opts.name, val)
		}
	case reflect.Map:
		for _, key := range mval.MapKeys() {
			val, err := valueToToml(mtype.Elem(), mval.MapIndex(key))
			if err != nil {
				return nil, err
			}
			tree.Set(key.String(), val)
		}
	}
	return tree, nil
}
// valueToTreeSlice converts a marshal slice into a slice of toml Trees,
// converting each element in turn.
func valueToTreeSlice(mtype reflect.Type, mval reflect.Value) ([]*Tree, error) {
	n := mval.Len()
	result := make([]*Tree, n)
	for i := 0; i < n; i++ {
		converted, err := valueToTree(mtype.Elem(), mval.Index(i))
		if err != nil {
			return nil, err
		}
		result[i] = converted
	}
	return result, nil
}
// valueToOtherSlice converts a marshal slice of non-tree elements into a
// slice of plain toml values.
func valueToOtherSlice(mtype reflect.Type, mval reflect.Value) (interface{}, error) {
	n := mval.Len()
	result := make([]interface{}, n)
	for i := 0; i < n; i++ {
		converted, err := valueToToml(mtype.Elem(), mval.Index(i))
		if err != nil {
			return nil, err
		}
		result[i] = converted
	}
	return result, nil
}
// Convert given marshal value to toml value
// Pointers are dereferenced; a type's own Marshaler is checked before the
// generic tree/slice cases; remaining primitive kinds are extracted
// directly from the reflect.Value.
func valueToToml(mtype reflect.Type, mval reflect.Value) (interface{}, error) {
	if mtype.Kind() == reflect.Ptr {
		return valueToToml(mtype.Elem(), mval.Elem())
	}
	switch {
	case isCustomMarshaler(mtype):
		return callCustomMarshaler(mval)
	case isTree(mtype):
		return valueToTree(mtype, mval)
	case isTreeSlice(mtype):
		return valueToTreeSlice(mtype, mval)
	case isOtherSlice(mtype):
		return valueToOtherSlice(mtype, mval)
	default:
		switch mtype.Kind() {
		case reflect.Bool:
			return mval.Bool(), nil
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			return mval.Int(), nil
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			return mval.Uint(), nil
		case reflect.Float32, reflect.Float64:
			return mval.Float(), nil
		case reflect.String:
			return mval.String(), nil
		case reflect.Struct:
			// Only time.Time reaches this branch (other structs are
			// trees per isTree); the assertion panics otherwise.
			return mval.Interface().(time.Time), nil
		default:
			return nil, fmt.Errorf("Marshal can't handle %v(%v)", mtype, mtype.Kind())
		}
	}
}
// Unmarshal attempts to unmarshal the Tree into a Go struct pointed by v.
// Neither Unmarshaler interfaces nor UnmarshalTOML functions are supported for
// sub-structs, and only definite types can be unmarshaled.
func (t *Tree) Unmarshal(v interface{}) error {
	vType := reflect.TypeOf(v)
	// The destination must be a non-nil pointer to a struct.
	if vType.Kind() != reflect.Ptr || vType.Elem().Kind() != reflect.Struct {
		return errors.New("Only a pointer to struct can be unmarshaled from TOML")
	}
	elem, err := valueFromTree(vType.Elem(), t)
	if err != nil {
		return err
	}
	reflect.ValueOf(v).Elem().Set(elem)
	return nil
}
// Unmarshal parses the TOML-encoded data and stores the result in the value
// pointed to by v. Behavior is similar to the Go json encoder, except that there
// is no concept of an Unmarshaler interface or UnmarshalTOML function for
// sub-structs, and currently only definite types can be unmarshaled to (i.e. no
// `interface{}`).
//
// See Marshal() documentation for types mapping table.
func Unmarshal(data []byte, v interface{}) error {
	tree, err := LoadReader(bytes.NewReader(data))
	if err != nil {
		return err
	}
	// Delegate the actual population of v to the tree's Unmarshal.
	return tree.Unmarshal(v)
}
// Convert toml tree to marshal struct or map, using marshal type
// Struct fields are matched against the tree key, its lower-cased form,
// and its upper-cased form, in that order; the first key present wins.
func valueFromTree(mtype reflect.Type, tval *Tree) (reflect.Value, error) {
	if mtype.Kind() == reflect.Ptr {
		return unwrapPointer(mtype, tval)
	}
	var mval reflect.Value
	switch mtype.Kind() {
	case reflect.Struct:
		mval = reflect.New(mtype).Elem()
		for i := 0; i < mtype.NumField(); i++ {
			mtypef := mtype.Field(i)
			opts := tomlOptions(mtypef)
			if opts.include {
				baseKey := opts.name
				// NOTE(review): strings.ToTitle upper-cases every rune,
				// not only the first — confirm that is the intended
				// case-variant here.
				keysToTry := []string{baseKey, strings.ToLower(baseKey), strings.ToTitle(baseKey)}
				for _, key := range keysToTry {
					exists := tval.Has(key)
					if !exists {
						continue
					}
					val := tval.Get(key)
					mvalf, err := valueFromToml(mtypef.Type, val)
					if err != nil {
						return mval, formatError(err, tval.GetPosition(key))
					}
					mval.Field(i).Set(mvalf)
					break
				}
			}
		}
	case reflect.Map:
		mval = reflect.MakeMap(mtype)
		for _, key := range tval.Keys() {
			val := tval.Get(key)
			mvalf, err := valueFromToml(mtype.Elem(), val)
			if err != nil {
				return mval, formatError(err, tval.GetPosition(key))
			}
			mval.SetMapIndex(reflect.ValueOf(key), mvalf)
		}
	}
	return mval, nil
}
// valueFromTreeSlice converts a slice of toml Trees into a marshal
// struct/map slice of the given type.
func valueFromTreeSlice(mtype reflect.Type, tval []*Tree) (reflect.Value, error) {
	result := reflect.MakeSlice(mtype, len(tval), len(tval))
	for i, subtree := range tval {
		converted, err := valueFromTree(mtype.Elem(), subtree)
		if err != nil {
			return result, err
		}
		result.Index(i).Set(converted)
	}
	return result, nil
}
// valueFromOtherSlice converts a slice of plain toml values into a marshal
// primitive slice of the given type.
func valueFromOtherSlice(mtype reflect.Type, tval []interface{}) (reflect.Value, error) {
	result := reflect.MakeSlice(mtype, len(tval), len(tval))
	for i, item := range tval {
		converted, err := valueFromToml(mtype.Elem(), item)
		if err != nil {
			return result, err
		}
		result.Index(i).Set(converted)
	}
	return result, nil
}
// valueFromToml converts a toml value into a marshal value of the given
// marshal type. Pointers are handled via unwrapPointer; tree and slice
// shapes recurse; primitive kinds are grouped and converted with
// reflect.Value.Convert, which performs the same wrapping numeric
// conversions as the former per-kind int8(...)/uint16(...) casts.
//
// Returns an invalid reflect.Value and a descriptive error when the toml
// value's dynamic type does not match the requested kind.
func valueFromToml(mtype reflect.Type, tval interface{}) (reflect.Value, error) {
	if mtype.Kind() == reflect.Ptr {
		return unwrapPointer(mtype, tval)
	}
	switch {
	case isTree(mtype):
		return valueFromTree(mtype, tval.(*Tree))
	case isTreeSlice(mtype):
		return valueFromTreeSlice(mtype, tval.([]*Tree))
	case isOtherSlice(mtype):
		return valueFromOtherSlice(mtype, tval.([]interface{}))
	default:
		switch mtype.Kind() {
		case reflect.Bool:
			val, ok := tval.(bool)
			if !ok {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to bool", tval, tval)
			}
			return reflect.ValueOf(val), nil
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			// TOML integers always arrive as int64; narrow to the target kind.
			val, ok := tval.(int64)
			if !ok {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to int", tval, tval)
			}
			return reflect.ValueOf(val).Convert(mtype), nil
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			// Unsigned targets also decode from int64; Convert wraps negatives
			// exactly like the previous explicit uintN(val) casts did.
			val, ok := tval.(int64)
			if !ok {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to uint", tval, tval)
			}
			return reflect.ValueOf(val).Convert(mtype), nil
		case reflect.Float32, reflect.Float64:
			val, ok := tval.(float64)
			if !ok {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to float", tval, tval)
			}
			return reflect.ValueOf(val).Convert(mtype), nil
		case reflect.String:
			val, ok := tval.(string)
			if !ok {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to string", tval, tval)
			}
			return reflect.ValueOf(val), nil
		case reflect.Struct:
			// Only time.Time is expected here (other structs are trees).
			val, ok := tval.(time.Time)
			if !ok {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to time", tval, tval)
			}
			return reflect.ValueOf(val), nil
		default:
			return reflect.ValueOf(nil), fmt.Errorf("Unmarshal can't handle %v(%v)", mtype, mtype.Kind())
		}
	}
}
// unwrapPointer converts the toml value for a pointer marshal type by
// converting the element, then boxing the result in a freshly allocated
// pointer.
func unwrapPointer(mtype reflect.Type, tval interface{}) (reflect.Value, error) {
	elemVal, err := valueFromToml(mtype.Elem(), tval)
	if err != nil {
		return reflect.ValueOf(nil), err
	}
	ptr := reflect.New(mtype.Elem())
	ptr.Elem().Set(elemVal)
	return ptr, nil
}
// tomlOptions derives the TOML marshal options for a struct field from its
// `toml` tag: a custom name, exclusion via "-", and the omitempty flag.
// Unexported fields and pointer-typed fields are forced to excluded and
// omitempty respectively.
func tomlOptions(vf reflect.StructField) tomlOpts {
	parts := strings.Split(vf.Tag.Get("toml"), ",")
	opts := tomlOpts{vf.Name, true, false}
	switch {
	case parts[0] == "-" && len(parts) == 1:
		// A bare "-" tag excludes the field entirely.
		opts.include = false
	case parts[0] != "":
		opts.name = strings.Trim(parts[0], " ")
	}
	if vf.PkgPath != "" {
		// Unexported field: never marshaled.
		opts.include = false
	}
	if len(parts) > 1 && strings.Trim(parts[1], " ") == "omitempty" {
		opts.omitempty = true
	}
	if vf.Type.Kind() == reflect.Ptr {
		// TOML has no null, so nil pointers must simply be dropped.
		opts.omitempty = true
	}
	return opts
}
func isZero(val reflect.Value) bool {
switch val.Type().Kind() {
case reflect.Map:
fallthrough
case reflect.Array:
fallthrough
case reflect.Slice:
return val.Len() == 0
default:
return reflect.DeepEqual(val.Interface(), reflect.Zero(val.Type()).Interface())
}
}
// formatError prefixes err with the given TOML position, unless the error
// message already starts with one (recognized by a leading '(').
func formatError(err error, pos Position) error {
	msg := err.Error()
	// Guard the index: an empty error message would make msg[0] panic.
	if len(msg) > 0 && msg[0] == '(' { // Error already contains position information
		return err
	}
	return fmt.Errorf("%s: %s", pos, err)
}

38
vendor/github.com/pelletier/go-toml/marshal_test.toml generated vendored Normal file
View File

@ -0,0 +1,38 @@
title = "TOML Marshal Testing"
[basic]
bool = true
date = 1979-05-27T07:32:00Z
float = 123.4
int = 5000
string = "Bite me"
uint = 5001
[basic_lists]
bools = [true,false,true]
dates = [1979-05-27T07:32:00Z,1980-05-27T07:32:00Z]
floats = [12.3,45.6,78.9]
ints = [8001,8001,8002]
strings = ["One","Two","Three"]
uints = [5002,5003]
[basic_map]
one = "one"
two = "two"
[subdoc]
[subdoc.first]
name = "First"
[subdoc.second]
name = "Second"
[[subdoclist]]
name = "List.First"
[[subdoclist]]
name = "List.Second"
[[subdocptrs]]
name = "Second"

View File

@ -1,234 +0,0 @@
package toml
import (
"fmt"
)
// support function to set positions for tomlValues
// NOTE: this is done to allow ctx.lastPosition to indicate the start of any
// values returned by the query engines
func tomlValueCheck(node interface{}, ctx *queryContext) interface{} {
	switch castNode := node.(type) {
	case *tomlValue:
		// Unwrap the raw value; remember where it was defined.
		ctx.lastPosition = castNode.position
		return castNode.value
	case []*TomlTree:
		// Table array: record the first element's position (if any) and
		// hand the array back unchanged.
		if len(castNode) > 0 {
			ctx.lastPosition = castNode[0].position
		}
		return node
	default:
		return node
	}
}
// base match
// matchBase supplies the shared "next" link that chains query functors
// together; every concrete matcher embeds it.
type matchBase struct {
	next pathFn
}

// setNext records the functor to invoke after this one matches.
func (f *matchBase) setNext(next pathFn) {
	f.next = next
}
// terminating functor - gathers results
// terminatingFn sits at the end of every functor chain and appends each
// node it receives (with its position) to the query result.
type terminatingFn struct {
	// empty
}

func newTerminatingFn() *terminatingFn {
	return &terminatingFn{}
}

func (f *terminatingFn) setNext(next pathFn) {
	// do nothing
}

func (f *terminatingFn) call(node interface{}, ctx *queryContext) {
	switch castNode := node.(type) {
	case *TomlTree:
		ctx.result.appendResult(node, castNode.position)
	case *tomlValue:
		ctx.result.appendResult(node, castNode.position)
	default:
		// use last position for scalars
		ctx.result.appendResult(node, ctx.lastPosition)
	}
}
// match single key
// matchKeyFn selects the child named Name from a tree, or from each tree
// in a table array; missing keys are silently skipped.
type matchKeyFn struct {
	matchBase
	Name string
}

func newMatchKeyFn(name string) *matchKeyFn {
	return &matchKeyFn{Name: name}
}

func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
	if array, ok := node.([]*TomlTree); ok {
		// Table array: apply the key to every element.
		for _, tree := range array {
			item := tree.values[f.Name]
			if item != nil {
				f.next.call(item, ctx)
			}
		}
	} else if tree, ok := node.(*TomlTree); ok {
		item := tree.values[f.Name]
		if item != nil {
			f.next.call(item, ctx)
		}
	}
}
// match single index
// matchIndexFn selects element Idx from an array node; out-of-range
// indexes (including negatives) match nothing.
type matchIndexFn struct {
	matchBase
	Idx int
}

func newMatchIndexFn(idx int) *matchIndexFn {
	return &matchIndexFn{Idx: idx}
}

func (f *matchIndexFn) call(node interface{}, ctx *queryContext) {
	if arr, ok := tomlValueCheck(node, ctx).([]interface{}); ok {
		if f.Idx < len(arr) && f.Idx >= 0 {
			f.next.call(arr[f.Idx], ctx)
		}
	}
}
// filter by slicing
// matchSliceFn applies a [Start:End:Step] slice expression to an array
// node, visiting each selected element. Negative bounds count from the
// end of the array; inverted bounds are swapped.
type matchSliceFn struct {
	matchBase
	Start, End, Step int
}

func newMatchSliceFn(start, end, step int) *matchSliceFn {
	return &matchSliceFn{Start: start, End: end, Step: step}
}

func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
	if arr, ok := tomlValueCheck(node, ctx).([]interface{}); ok {
		// adjust indexes for negative values, reverse ordering
		realStart, realEnd := f.Start, f.End
		if realStart < 0 {
			realStart = len(arr) + realStart
		}
		if realEnd < 0 {
			realEnd = len(arr) + realEnd
		}
		if realEnd < realStart {
			realEnd, realStart = realStart, realEnd // swap
		}
		// loop and gather
		// NOTE(review): a Step <= 0 would loop forever (or not at all);
		// callers appear responsible for supplying a positive step — confirm.
		for idx := realStart; idx < realEnd; idx += f.Step {
			f.next.call(arr[idx], ctx)
		}
	}
}
// match anything
// matchAnyFn visits every immediate child value of a tree node; iteration
// order follows Go map order and is therefore unspecified.
type matchAnyFn struct {
	matchBase
}

func newMatchAnyFn() *matchAnyFn {
	return &matchAnyFn{}
}

func (f *matchAnyFn) call(node interface{}, ctx *queryContext) {
	if tree, ok := node.(*TomlTree); ok {
		for _, v := range tree.values {
			f.next.call(v, ctx)
		}
	}
}
// filter through union
// matchUnionFn fans a node out to several alternative functors; each
// member of the union shares the same downstream chain.
type matchUnionFn struct {
	Union []pathFn
}

func (f *matchUnionFn) setNext(next pathFn) {
	// Wire the same continuation into every branch of the union.
	for _, fn := range f.Union {
		fn.setNext(next)
	}
}

func (f *matchUnionFn) call(node interface{}, ctx *queryContext) {
	for _, fn := range f.Union {
		fn.call(node, ctx)
	}
}
// match every single last node in the tree
// matchRecursiveFn visits the tree itself and then every descendant value
// depth-first, passing each one to the next functor.
type matchRecursiveFn struct {
	matchBase
}

func newMatchRecursiveFn() *matchRecursiveFn {
	return &matchRecursiveFn{}
}

func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) {
	if tree, ok := node.(*TomlTree); ok {
		var visit func(tree *TomlTree)
		visit = func(tree *TomlTree) {
			for _, v := range tree.values {
				f.next.call(v, ctx)
				// Recurse into subtrees and table arrays.
				switch node := v.(type) {
				case *TomlTree:
					visit(node)
				case []*TomlTree:
					for _, subtree := range node {
						visit(subtree)
					}
				}
			}
		}
		// The root tree itself is matched before its descendants.
		f.next.call(tree, ctx)
		visit(tree)
	}
}
// match based on an externally provided functional filter
// matchFilterFn looks up the named filter in the query context and visits
// only the child values for which the filter returns true. Panics if the
// named filter is not registered.
type matchFilterFn struct {
	matchBase
	Pos  Position
	Name string
}

func newMatchFilterFn(name string, pos Position) *matchFilterFn {
	return &matchFilterFn{Name: name, Pos: pos}
}

func (f *matchFilterFn) call(node interface{}, ctx *queryContext) {
	fn, ok := (*ctx.filters)[f.Name]
	if !ok {
		panic(fmt.Sprintf("%s: query context does not have filter '%s'",
			f.Pos.String(), f.Name))
	}
	switch castNode := tomlValueCheck(node, ctx).(type) {
	case *TomlTree:
		for _, v := range castNode.values {
			if tv, ok := v.(*tomlValue); ok {
				// The filter sees the unwrapped value; the wrapped node
				// is what continues along the chain.
				if fn(tv.value) {
					f.next.call(v, ctx)
				}
			} else {
				if fn(v) {
					f.next.call(v, ctx)
				}
			}
		}
	case []interface{}:
		for _, v := range castNode {
			if fn(v) {
				f.next.call(v, ctx)
			}
		}
	}
}

View File

@ -3,6 +3,7 @@
package toml
import (
"errors"
"fmt"
"reflect"
"regexp"
@ -12,11 +13,11 @@ import (
)
type tomlParser struct {
flow chan token
tree *TomlTree
tokensBuffer []token
currentGroup []string
seenGroupKeys []string
flowIdx int
flow []token
tree *Tree
currentTable []string
seenTableKeys []string
}
type tomlParserStateFn func() tomlParserStateFn
@ -33,16 +34,10 @@ func (p *tomlParser) run() {
}
func (p *tomlParser) peek() *token {
if len(p.tokensBuffer) != 0 {
return &(p.tokensBuffer[0])
}
tok, ok := <-p.flow
if !ok {
if p.flowIdx >= len(p.flow) {
return nil
}
p.tokensBuffer = append(p.tokensBuffer, tok)
return &tok
return &p.flow[p.flowIdx]
}
func (p *tomlParser) assume(typ tokenType) {
@ -56,16 +51,12 @@ func (p *tomlParser) assume(typ tokenType) {
}
func (p *tomlParser) getToken() *token {
if len(p.tokensBuffer) != 0 {
tok := p.tokensBuffer[0]
p.tokensBuffer = p.tokensBuffer[1:]
return &tok
}
tok, ok := <-p.flow
if !ok {
tok := p.peek()
if tok == nil {
return nil
}
return &tok
p.flowIdx++
return tok
}
func (p *tomlParser) parseStart() tomlParserStateFn {
@ -95,48 +86,48 @@ func (p *tomlParser) parseGroupArray() tomlParserStateFn {
startToken := p.getToken() // discard the [[
key := p.getToken()
if key.typ != tokenKeyGroupArray {
p.raiseError(key, "unexpected token %s, was expecting a key group array", key)
p.raiseError(key, "unexpected token %s, was expecting a table array key", key)
}
// get or create group array element at the indicated part in the path
// get or create table array element at the indicated part in the path
keys, err := parseKey(key.val)
if err != nil {
p.raiseError(key, "invalid group array key: %s", err)
p.raiseError(key, "invalid table array key: %s", err)
}
p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries
destTree := p.tree.GetPath(keys)
var array []*TomlTree
var array []*Tree
if destTree == nil {
array = make([]*TomlTree, 0)
} else if target, ok := destTree.([]*TomlTree); ok && target != nil {
array = destTree.([]*TomlTree)
array = make([]*Tree, 0)
} else if target, ok := destTree.([]*Tree); ok && target != nil {
array = destTree.([]*Tree)
} else {
p.raiseError(key, "key %s is already assigned and not of type group array", key)
p.raiseError(key, "key %s is already assigned and not of type table array", key)
}
p.currentGroup = keys
p.currentTable = keys
// add a new tree to the end of the group array
newTree := newTomlTree()
// add a new tree to the end of the table array
newTree := newTree()
newTree.position = startToken.Position
array = append(array, newTree)
p.tree.SetPath(p.currentGroup, array)
p.tree.SetPath(p.currentTable, array)
// remove all keys that were children of this group array
// remove all keys that were children of this table array
prefix := key.val + "."
found := false
for ii := 0; ii < len(p.seenGroupKeys); {
groupKey := p.seenGroupKeys[ii]
if strings.HasPrefix(groupKey, prefix) {
p.seenGroupKeys = append(p.seenGroupKeys[:ii], p.seenGroupKeys[ii+1:]...)
for ii := 0; ii < len(p.seenTableKeys); {
tableKey := p.seenTableKeys[ii]
if strings.HasPrefix(tableKey, prefix) {
p.seenTableKeys = append(p.seenTableKeys[:ii], p.seenTableKeys[ii+1:]...)
} else {
found = (groupKey == key.val)
found = (tableKey == key.val)
ii++
}
}
// keep this key name from use by other kinds of assignments
if !found {
p.seenGroupKeys = append(p.seenGroupKeys, key.val)
p.seenTableKeys = append(p.seenTableKeys, key.val)
}
// move to next parser state
@ -148,24 +139,24 @@ func (p *tomlParser) parseGroup() tomlParserStateFn {
startToken := p.getToken() // discard the [
key := p.getToken()
if key.typ != tokenKeyGroup {
p.raiseError(key, "unexpected token %s, was expecting a key group", key)
p.raiseError(key, "unexpected token %s, was expecting a table key", key)
}
for _, item := range p.seenGroupKeys {
for _, item := range p.seenTableKeys {
if item == key.val {
p.raiseError(key, "duplicated tables")
}
}
p.seenGroupKeys = append(p.seenGroupKeys, key.val)
p.seenTableKeys = append(p.seenTableKeys, key.val)
keys, err := parseKey(key.val)
if err != nil {
p.raiseError(key, "invalid group array key: %s", err)
p.raiseError(key, "invalid table array key: %s", err)
}
if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
p.raiseError(key, "%s", err)
}
p.assume(tokenRightBracket)
p.currentGroup = keys
p.currentTable = keys
return p.parseStart
}
@ -174,26 +165,26 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
p.assume(tokenEqual)
value := p.parseRvalue()
var groupKey []string
if len(p.currentGroup) > 0 {
groupKey = p.currentGroup
var tableKey []string
if len(p.currentTable) > 0 {
tableKey = p.currentTable
} else {
groupKey = []string{}
tableKey = []string{}
}
// find the group to assign, looking out for arrays of groups
var targetNode *TomlTree
switch node := p.tree.GetPath(groupKey).(type) {
case []*TomlTree:
// find the table to assign, looking out for arrays of tables
var targetNode *Tree
switch node := p.tree.GetPath(tableKey).(type) {
case []*Tree:
targetNode = node[len(node)-1]
case *TomlTree:
case *Tree:
targetNode = node
default:
p.raiseError(key, "Unknown group type for path: %s",
strings.Join(groupKey, "."))
p.raiseError(key, "Unknown table type for path: %s",
strings.Join(tableKey, "."))
}
// assign value to the found group
// assign value to the found table
keyVals, err := parseKey(key.val)
if err != nil {
p.raiseError(key, "%s", err)
@ -203,7 +194,7 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
}
keyVal := keyVals[0]
localKey := []string{keyVal}
finalKey := append(groupKey, keyVal)
finalKey := append(tableKey, keyVal)
if targetNode.GetPath(localKey) != nil {
p.raiseError(key, "The following key was defined twice: %s",
strings.Join(finalKey, "."))
@ -211,7 +202,7 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
var toInsert interface{}
switch value.(type) {
case *TomlTree:
case *Tree, []*Tree:
toInsert = value
default:
toInsert = &tomlValue{value, key.Position}
@ -224,7 +215,7 @@ var numberUnderscoreInvalidRegexp *regexp.Regexp
func cleanupNumberToken(value string) (string, error) {
if numberUnderscoreInvalidRegexp.MatchString(value) {
return "", fmt.Errorf("invalid use of _ in number")
return "", errors.New("invalid use of _ in number")
}
cleanedVal := strings.Replace(value, "_", "", -1)
return cleanedVal, nil
@ -288,8 +279,8 @@ func tokenIsComma(t *token) bool {
return t != nil && t.typ == tokenComma
}
func (p *tomlParser) parseInlineTable() *TomlTree {
tree := newTomlTree()
func (p *tomlParser) parseInlineTable() *Tree {
tree := newTree()
var previous *token
Loop:
for {
@ -359,29 +350,29 @@ func (p *tomlParser) parseArray() interface{} {
p.getToken()
}
}
// An array of TomlTrees is actually an array of inline
// An array of Trees is actually an array of inline
// tables, which is a shorthand for a table array. If the
// array was not converted from []interface{} to []*TomlTree,
// array was not converted from []interface{} to []*Tree,
// the two notations would not be equivalent.
if arrayType == reflect.TypeOf(newTomlTree()) {
tomlArray := make([]*TomlTree, len(array))
if arrayType == reflect.TypeOf(newTree()) {
tomlArray := make([]*Tree, len(array))
for i, v := range array {
tomlArray[i] = v.(*TomlTree)
tomlArray[i] = v.(*Tree)
}
return tomlArray
}
return array
}
func parseToml(flow chan token) *TomlTree {
result := newTomlTree()
func parseToml(flow []token) *Tree {
result := newTree()
result.position = Position{1, 1}
parser := &tomlParser{
flowIdx: 0,
flow: flow,
tree: result,
tokensBuffer: make([]token, 0),
currentGroup: make([]string, 0),
seenGroupKeys: make([]string, 0),
currentTable: make([]string, 0),
seenTableKeys: make([]string, 0),
}
parser.run()
return result

View File

@ -1,153 +0,0 @@
package toml
import (
"time"
)
// NodeFilterFn represents a user-defined filter function, for use with
// Query.SetFilter().
//
// The return value of the function must indicate if 'node' is to be included
// at this stage of the TOML path. Returning true will include the node, and
// returning false will exclude it.
//
// The filter is invoked once per candidate node while a query executes.
//
// NOTE: Care should be taken to write script callbacks such that they are safe
// to use from multiple goroutines.
type NodeFilterFn func(node interface{}) bool
// QueryResult is the result of Executing a Query.
type QueryResult struct {
	items     []interface{} // matched nodes, parallel to positions
	positions []Position    // document position of each matched node
}

// appends a value/position pair to the result set.
func (r *QueryResult) appendResult(node interface{}, pos Position) {
	r.items = append(r.items, node)
	r.positions = append(r.positions, pos)
}

// Values is a set of values within a QueryResult. The order of values is not
// guaranteed to be in document order, and may be different each time a query is
// executed.
func (r QueryResult) Values() []interface{} {
	values := make([]interface{}, len(r.items))
	for i, v := range r.items {
		o, ok := v.(*tomlValue)
		if ok {
			// Unwrap raw toml values to their underlying Go value.
			values[i] = o.value
		} else {
			values[i] = v
		}
	}
	return values
}

// Positions is a set of positions for values within a QueryResult. Each index
// in Positions() corresponds to the entry in Value() of the same index.
func (r QueryResult) Positions() []Position {
	return r.positions
}
// runtime context for executing query paths
type queryContext struct {
	result       *QueryResult             // accumulates matched nodes
	filters      *map[string]NodeFilterFn // filter table consulted by ?(..) expressions
	lastPosition Position                 // position of the most recently visited value
}

// generic path functor interface
// Each query step implements pathFn: setNext chains steps, call processes
// one node and forwards matches down the chain.
type pathFn interface {
	setNext(next pathFn)
	call(node interface{}, ctx *queryContext)
}
// A Query is the representation of a compiled TOML path. A Query is safe
// for concurrent use by multiple goroutines.
type Query struct {
	root    pathFn                   // first functor in the chain (nil for the empty path)
	tail    pathFn                   // last functor appended so far
	filters *map[string]NodeFilterFn // filter table; shared default until SetFilter clones it
}

// newQuery returns an empty Query wired to the shared default filter table.
func newQuery() *Query {
	return &Query{
		root:    nil,
		tail:    nil,
		filters: &defaultFilterFunctions,
	}
}

// appendPath links next onto the end of the functor chain and terminates
// the chain with a fresh result-gathering functor.
func (q *Query) appendPath(next pathFn) {
	if q.root == nil {
		q.root = next
	} else {
		q.tail.setNext(next)
	}
	q.tail = next
	next.setNext(newTerminatingFn()) // init the next functor
}

// CompileQuery compiles a TOML path expression. The returned Query can be used
// to match elements within a TomlTree and its descendants.
func CompileQuery(path string) (*Query, error) {
	return parseQuery(lexQuery(path))
}

// Execute executes a query against a TomlTree, and returns the result of the query.
func (q *Query) Execute(tree *TomlTree) *QueryResult {
	result := &QueryResult{
		items:     []interface{}{},
		positions: []Position{},
	}
	if q.root == nil {
		// An empty path matches the tree itself.
		result.appendResult(tree, tree.GetPosition(""))
	} else {
		ctx := &queryContext{
			result:  result,
			filters: q.filters,
		}
		q.root.call(tree, ctx)
	}
	return result
}

// SetFilter sets a user-defined filter function. These may be used inside
// "?(..)" query expressions to filter TOML document elements within a query.
func (q *Query) SetFilter(name string, fn NodeFilterFn) {
	if q.filters == &defaultFilterFunctions {
		// clone the static table
		// (copy-on-write: the shared default map is never mutated)
		q.filters = &map[string]NodeFilterFn{}
		for k, v := range defaultFilterFunctions {
			(*q.filters)[k] = v
		}
	}
	(*q.filters)[name] = fn
}
// defaultFilterFunctions are the built-in filters available to every
// query; each one matches nodes of the named TOML type.
var defaultFilterFunctions = map[string]NodeFilterFn{
	"tree": func(node interface{}) bool {
		_, ok := node.(*TomlTree)
		return ok
	},
	"int": func(node interface{}) bool {
		_, ok := node.(int64)
		return ok
	},
	"float": func(node interface{}) bool {
		_, ok := node.(float64)
		return ok
	},
	"string": func(node interface{}) bool {
		_, ok := node.(string)
		return ok
	},
	"time": func(node interface{}) bool {
		_, ok := node.(time.Time)
		return ok
	},
	"bool": func(node interface{}) bool {
		_, ok := node.(bool)
		return ok
	},
}

View File

@ -1,356 +0,0 @@
// TOML JSONPath lexer.
//
// Written using the principles developed by Rob Pike in
// http://www.youtube.com/watch?v=HxaD_trXwRE
package toml
import (
"fmt"
"strconv"
"strings"
"unicode/utf8"
)
// Lexer state function
type queryLexStateFn func() queryLexStateFn

// Lexer definition
type queryLexer struct {
	input      string     // the full query expression being lexed
	start      int        // byte offset where the pending token begins
	pos        int        // byte offset of the next rune to read
	width      int        // byte width of the last rune read (for backup)
	tokens     chan token // emitted tokens, closed when lexing ends
	depth      int
	line       int    // 1-based line of the pending token
	col        int    // 1-based column of the pending token
	stringTerm string // quote character terminating the current string
}
// run drives the lexer state machine to completion, then closes the token
// channel so consumers can finish.
func (l *queryLexer) run() {
	for state := l.lexVoid; state != nil; {
		state = state()
	}
	close(l.tokens)
}

// nextStart advances the start marker to the current position, updating
// line/column counters for any newlines that were consumed.
func (l *queryLexer) nextStart() {
	// iterate by runes (utf8 characters)
	// search for newlines and advance line/col counts
	for i := l.start; i < l.pos; {
		r, width := utf8.DecodeRuneInString(l.input[i:])
		if r == '\n' {
			l.line++
			l.col = 1
		} else {
			l.col++
		}
		i += width
	}
	// advance start position to next token
	l.start = l.pos
}

// emit sends the pending input (start..pos) as a token of type t.
func (l *queryLexer) emit(t tokenType) {
	l.tokens <- token{
		Position: Position{l.line, l.col},
		typ:      t,
		val:      l.input[l.start:l.pos],
	}
	l.nextStart()
}

// emitWithValue sends a token of type t with an explicit value, used when
// the token text differs from the raw input (e.g. unescaped strings).
func (l *queryLexer) emitWithValue(t tokenType, value string) {
	l.tokens <- token{
		Position: Position{l.line, l.col},
		typ:      t,
		val:      value,
	}
	l.nextStart()
}

// next consumes and returns the next rune, or eof at end of input.
func (l *queryLexer) next() rune {
	if l.pos >= len(l.input) {
		l.width = 0
		return eof
	}
	var r rune
	r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
	l.pos += l.width
	return r
}

// ignore discards the pending input without emitting a token.
func (l *queryLexer) ignore() {
	l.nextStart()
}

// backup steps back over the last rune read by next.
func (l *queryLexer) backup() {
	l.pos -= l.width
}

// errorf emits an error token carrying a formatted message and returns nil
// to terminate the state machine.
func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
	l.tokens <- token{
		Position: Position{l.line, l.col},
		typ:      tokenError,
		val:      fmt.Sprintf(format, args...),
	}
	return nil
}

// peek returns the next rune without consuming it.
func (l *queryLexer) peek() rune {
	r := l.next()
	l.backup()
	return r
}

// accept consumes the next rune only if it appears in valid.
func (l *queryLexer) accept(valid string) bool {
	if strings.ContainsRune(valid, l.next()) {
		return true
	}
	l.backup()
	return false
}

// follow reports whether the upcoming input starts with next.
func (l *queryLexer) follow(next string) bool {
	return strings.HasPrefix(l.input[l.pos:], next)
}
// lexVoid is the top-level lexer state: it emits punctuation tokens,
// skips whitespace, and dispatches to the key, string, or number states.
func (l *queryLexer) lexVoid() queryLexStateFn {
	for {
		next := l.peek()
		switch next {
		case '$':
			l.pos++
			l.emit(tokenDollar)
			continue
		case '.':
			// ".." (recursive descent) must be distinguished from ".".
			if l.follow("..") {
				l.pos += 2
				l.emit(tokenDotDot)
			} else {
				l.pos++
				l.emit(tokenDot)
			}
			continue
		case '[':
			l.pos++
			l.emit(tokenLeftBracket)
			continue
		case ']':
			l.pos++
			l.emit(tokenRightBracket)
			continue
		case ',':
			l.pos++
			l.emit(tokenComma)
			continue
		case '*':
			l.pos++
			l.emit(tokenStar)
			continue
		case '(':
			l.pos++
			l.emit(tokenLeftParen)
			continue
		case ')':
			l.pos++
			l.emit(tokenRightParen)
			continue
		case '?':
			l.pos++
			l.emit(tokenQuestion)
			continue
		case ':':
			l.pos++
			l.emit(tokenColon)
			continue
		case '\'':
			// Remember which quote opened the string so lexString can
			// look for the matching terminator.
			l.ignore()
			l.stringTerm = string(next)
			return l.lexString
		case '"':
			l.ignore()
			l.stringTerm = string(next)
			return l.lexString
		}

		if isSpace(next) {
			l.next()
			l.ignore()
			continue
		}

		if isAlphanumeric(next) {
			return l.lexKey
		}

		if next == '+' || next == '-' || isDigit(next) {
			return l.lexNumber
		}

		if l.next() == eof {
			break
		}

		return l.errorf("unexpected char: '%v'", next)
	}
	l.emit(tokenEOF)
	return nil
}
// lexKey consumes a run of alphanumeric characters and emits it as a key
// token, returning to the void state; emits EOF if input ends mid-key.
func (l *queryLexer) lexKey() queryLexStateFn {
	for {
		next := l.peek()
		if !isAlphanumeric(next) {
			l.emit(tokenKey)
			return l.lexVoid
		}

		if l.next() == eof {
			break
		}
	}
	l.emit(tokenEOF)
	return nil
}
// lexString consumes a quoted string (terminated by l.stringTerm),
// resolving backslash escapes into growingString, and emits the unescaped
// value as a string token. Unterminated strings and bad escapes error out.
func (l *queryLexer) lexString() queryLexStateFn {
	l.pos++
	l.ignore()
	growingString := ""

	for {
		if l.follow(l.stringTerm) {
			l.emitWithValue(tokenString, growingString)
			l.pos++
			l.ignore()
			return l.lexVoid
		}

		if l.follow("\\\"") {
			l.pos++
			growingString += "\""
		} else if l.follow("\\'") {
			l.pos++
			growingString += "'"
		} else if l.follow("\\n") {
			l.pos++
			growingString += "\n"
		} else if l.follow("\\b") {
			l.pos++
			growingString += "\b"
		} else if l.follow("\\f") {
			l.pos++
			growingString += "\f"
		} else if l.follow("\\/") {
			l.pos++
			growingString += "/"
		} else if l.follow("\\t") {
			l.pos++
			growingString += "\t"
		} else if l.follow("\\r") {
			l.pos++
			growingString += "\r"
		} else if l.follow("\\\\") {
			l.pos++
			growingString += "\\"
		} else if l.follow("\\u") {
			// \uXXXX: four hex digits.
			l.pos += 2
			code := ""
			for i := 0; i < 4; i++ {
				c := l.peek()
				l.pos++
				if !isHexDigit(c) {
					return l.errorf("unfinished unicode escape")
				}
				code = code + string(c)
			}
			l.pos--
			intcode, err := strconv.ParseInt(code, 16, 32)
			if err != nil {
				return l.errorf("invalid unicode escape: \\u" + code)
			}
			growingString += string(rune(intcode))
		} else if l.follow("\\U") {
			// \UXXXXXXXX: eight hex digits.
			// NOTE(review): ParseInt with bitSize 32 rejects values above
			// 0x7FFFFFFF, and the error text says \u rather than \U —
			// confirm both are intended.
			l.pos += 2
			code := ""
			for i := 0; i < 8; i++ {
				c := l.peek()
				l.pos++
				if !isHexDigit(c) {
					return l.errorf("unfinished unicode escape")
				}
				code = code + string(c)
			}
			l.pos--
			intcode, err := strconv.ParseInt(code, 16, 32)
			if err != nil {
				return l.errorf("invalid unicode escape: \\u" + code)
			}
			growingString += string(rune(intcode))
		} else if l.follow("\\") {
			l.pos++
			return l.errorf("invalid escape sequence: \\" + string(l.peek()))
		} else {
			growingString += string(l.peek())
		}

		if l.next() == eof {
			break
		}
	}

	return l.errorf("unclosed string")
}
// lexNumber consumes an optionally signed integer or float literal and
// emits the matching token; a float requires at least one digit and at
// most one dot, which may neither start nor end the literal.
func (l *queryLexer) lexNumber() queryLexStateFn {
	l.ignore()
	// At most one leading sign is accepted.
	if !l.accept("+") {
		l.accept("-")
	}
	pointSeen := false
	digitSeen := false
	for {
		next := l.next()
		if next == '.' {
			if pointSeen {
				return l.errorf("cannot have two dots in one float")
			}
			if !isDigit(l.peek()) {
				return l.errorf("float cannot end with a dot")
			}
			pointSeen = true
		} else if isDigit(next) {
			digitSeen = true
		} else {
			l.backup()
			break
		}
		if pointSeen && !digitSeen {
			return l.errorf("cannot start float with a dot")
		}
	}

	if !digitSeen {
		return l.errorf("no digit in that number")
	}
	if pointSeen {
		l.emit(tokenFloat)
	} else {
		l.emit(tokenInteger)
	}
	return l.lexVoid
}
// Entry point
// lexQuery starts lexing input on a new goroutine and returns the channel
// from which tokens can be received; the channel is closed when done.
func lexQuery(input string) chan token {
	l := &queryLexer{
		input:  input,
		tokens: make(chan token),
		line:   1,
		col:    1,
	}
	go l.run()
	return l.tokens
}

View File

@ -1,275 +0,0 @@
/*
Based on the "jsonpath" spec/concept.
http://goessner.net/articles/JsonPath/
https://code.google.com/p/json-path/
*/
package toml
import (
"fmt"
)
const maxInt = int(^uint(0) >> 1)
// queryParser consumes the token stream produced by the query lexer
// and assembles a Query (a chain of pathFn matchers).
type queryParser struct {
	flow         chan token // token stream from the lexer goroutine
	tokensBuffer []token    // tokens pushed back for peek/lookahead
	query        *Query     // query under construction
	union        []pathFn   // sub-expressions collected inside '[...]'
	err          error      // first parse error, set via parseError
}

// queryParserStateFn is one state of the parser's state machine; it
// returns the next state to run, or nil to stop.
type queryParserStateFn func() queryParserStateFn
// parseError records a formatted error message, prefixed with the
// offending token's position, on the parser and returns nil so that
// run's state-machine loop terminates. Note it does not panic; the
// error is surfaced later by parseQuery.
func (p *queryParser) parseError(tok *token, msg string, args ...interface{}) queryParserStateFn {
	p.err = fmt.Errorf(tok.Position.String()+": "+msg, args...)
	return nil // trigger parse to end
}
// run drives the parser's state machine, starting at parseStart and
// calling each returned state until one returns nil.
func (p *queryParser) run() {
	var state queryParserStateFn = p.parseStart
	for state != nil {
		state = state()
	}
}
// backup pushes a copy of tok back into the buffer so the next
// getToken/peek sees it again. It appends to the tail; getToken pops
// from the head.
func (p *queryParser) backup(tok *token) {
	p.tokensBuffer = append(p.tokensBuffer, *tok)
}
// peek returns the next token without consuming it, or nil once the
// token stream has been exhausted.
func (p *queryParser) peek() *token {
	if len(p.tokensBuffer) > 0 {
		return &p.tokensBuffer[0]
	}
	next, ok := <-p.flow
	if !ok {
		return nil
	}
	p.backup(&next)
	return &next
}
// lookahead reports whether the next tokens have exactly the given
// types, in order. The inspected tokens are not consumed: they are
// restored to the buffer before returning.
//
// Fix: the consumed tokens are taken from the *front* of tokensBuffer
// (via getToken), so they must be restored to the front as well.
// Appending them to the tail — as the previous code did — reorders the
// stream whenever the buffer already holds other tokens. When the
// buffer is empty (the common case) both forms behave identically.
func (p *queryParser) lookahead(types ...tokenType) bool {
	result := true
	buffer := []token{}

	for _, typ := range types {
		tok := p.getToken()
		if tok == nil {
			result = false
			break
		}
		buffer = append(buffer, *tok)
		if tok.typ != typ {
			result = false
			break
		}
	}
	// Restore the consumed tokens ahead of anything still buffered.
	p.tokensBuffer = append(buffer, p.tokensBuffer...)
	return result
}
// getToken consumes and returns the next token, preferring buffered
// (pushed-back) tokens over the lexer channel. Returns nil once the
// channel is closed and the buffer is empty.
func (p *queryParser) getToken() *token {
	if len(p.tokensBuffer) > 0 {
		head := p.tokensBuffer[0]
		p.tokensBuffer = p.tokensBuffer[1:]
		return &head
	}
	next, ok := <-p.flow
	if !ok {
		return nil
	}
	return &next
}
// parseStart expects the leading '$' of a query expression; a nil
// token or EOF ends parsing immediately.
func (p *queryParser) parseStart() queryParserStateFn {
	tok := p.getToken()
	switch {
	case tok == nil || tok.typ == tokenEOF:
		return nil
	case tok.typ == tokenDollar:
		return p.parseMatchExpr
	default:
		return p.parseError(tok, "Expected '$' at start of expression")
	}
}
// parseMatchExpr handles one path element: '.' followed by a key or
// '*', '..' (recursive descent), '[' introducing a bracket
// expression, or EOF to finish the query.
//
// NOTE(review): the inner `tok :=` declarations shadow the outer tok.
// If an inner token matches none of its cases, control falls through
// to the final parseError, which reports the *outer* token — keep
// that in mind before restructuring.
func (p *queryParser) parseMatchExpr() queryParserStateFn {
	tok := p.getToken()
	switch tok.typ {
	case tokenDotDot:
		p.query.appendPath(&matchRecursiveFn{})
		// nested parse for '..'
		tok := p.getToken()
		switch tok.typ {
		case tokenKey:
			p.query.appendPath(newMatchKeyFn(tok.val))
			return p.parseMatchExpr
		case tokenLeftBracket:
			return p.parseBracketExpr
		case tokenStar:
			// do nothing - the recursive predicate is enough
			return p.parseMatchExpr
		}
	case tokenDot:
		// nested parse for '.'
		tok := p.getToken()
		switch tok.typ {
		case tokenKey:
			p.query.appendPath(newMatchKeyFn(tok.val))
			return p.parseMatchExpr
		case tokenStar:
			p.query.appendPath(&matchAnyFn{})
			return p.parseMatchExpr
		}
	case tokenLeftBracket:
		return p.parseBracketExpr
	case tokenEOF:
		return nil // allow EOF at this stage
	}
	return p.parseError(tok, "expected match expression")
}
// parseBracketExpr decides whether a '[' expression is a slice
// ("start:end:step") or a union of sub-expressions.
func (p *queryParser) parseBracketExpr() queryParserStateFn {
	// "<int>:" or a leading ':' both introduce a slice; the
	// short-circuit preserves the original evaluation order
	// (lookahead first, peek only if it fails).
	if p.lookahead(tokenInteger, tokenColon) || p.peek().typ == tokenColon {
		return p.parseSliceExpr
	}
	return p.parseUnionExpr
}
// parseUnionExpr parses the comma-separated sub-expressions inside
// '[...]' — integer indices, keys, quoted strings, or '?' filters —
// up to the closing ']', then appends the resulting matcher (a single
// pathFn, or a matchUnionFn wrapping several) to the query.
func (p *queryParser) parseUnionExpr() queryParserStateFn {
	var tok *token

	// this state can be traversed after some sub-expressions
	// so be careful when setting up state in the parser
	if p.union == nil {
		p.union = []pathFn{}
	}

loop: // labeled loop for easy breaking
	for {
		if len(p.union) > 0 {
			// parse delimiter or terminator
			tok = p.getToken()
			switch tok.typ {
			case tokenComma:
				// do nothing
			case tokenRightBracket:
				break loop
			default:
				return p.parseError(tok, "expected ',' or ']', not '%s'", tok.val)
			}
		}
		// parse sub expression
		tok = p.getToken()
		switch tok.typ {
		case tokenInteger:
			p.union = append(p.union, newMatchIndexFn(tok.Int()))
		case tokenKey:
			p.union = append(p.union, newMatchKeyFn(tok.val))
		case tokenString:
			p.union = append(p.union, newMatchKeyFn(tok.val))
		case tokenQuestion:
			// parseFilterExpr appends to p.union itself and then
			// re-enters this state — which is why union state lives
			// on the parser rather than in a local.
			return p.parseFilterExpr
		default:
			return p.parseError(tok, "expected union sub expression, not '%s', %d", tok.val, len(p.union))
		}
	}
	// if there is only one sub-expression, use that instead
	if len(p.union) == 1 {
		p.query.appendPath(p.union[0])
	} else {
		p.query.appendPath(&matchUnionFn{p.union})
	}
	p.union = nil // clear out state
	return p.parseMatchExpr
}
// parseSliceExpr parses "[start:end:step]" where every component is
// optional; omitted components keep the defaults 0, maxInt, and 1.
func (p *queryParser) parseSliceExpr() queryParserStateFn {
	// Defaults select every element.
	lower, upper, stride := 0, maxInt, 1

	// Optional start index.
	tok := p.getToken()
	if tok.typ == tokenInteger {
		lower = tok.Int()
		tok = p.getToken()
	}
	if tok.typ != tokenColon {
		return p.parseError(tok, "expected ':'")
	}

	// Optional end index.
	tok = p.getToken()
	if tok.typ == tokenInteger {
		upper = tok.Int()
		tok = p.getToken()
	}
	if tok.typ == tokenRightBracket {
		// Two-component form "[start:end]".
		p.query.appendPath(newMatchSliceFn(lower, upper, stride))
		return p.parseMatchExpr
	}
	if tok.typ != tokenColon {
		return p.parseError(tok, "expected ']' or ':'")
	}

	// Optional step.
	tok = p.getToken()
	if tok.typ == tokenInteger {
		stride = tok.Int()
		if stride < 0 {
			return p.parseError(tok, "step must be a positive value")
		}
		tok = p.getToken()
	}
	if tok.typ != tokenRightBracket {
		return p.parseError(tok, "expected ']'")
	}
	p.query.appendPath(newMatchSliceFn(lower, upper, stride))
	return p.parseMatchExpr
}
// parseFilterExpr parses a "?(name)" filter sub-expression inside a
// union and appends the corresponding matcher to p.union before
// returning to the union state.
//
// Fix: corrected the user-facing error message typo
// ("funciton" -> "function").
func (p *queryParser) parseFilterExpr() queryParserStateFn {
	tok := p.getToken()
	if tok.typ != tokenLeftParen {
		return p.parseError(tok, "expected left-parenthesis for filter expression")
	}
	tok = p.getToken()
	if tok.typ != tokenKey && tok.typ != tokenString {
		return p.parseError(tok, "expected key or string for filter function name")
	}
	name := tok.val
	tok = p.getToken()
	if tok.typ != tokenRightParen {
		return p.parseError(tok, "expected right-parenthesis for filter expression")
	}
	p.union = append(p.union, newMatchFilterFn(name, tok.Position))
	return p.parseUnionExpr
}
// parseQuery drains the token stream and returns the resulting Query
// together with the first parse error encountered, if any.
func parseQuery(flow chan token) (*Query, error) {
	p := &queryParser{
		flow:         flow,
		tokensBuffer: []token{},
		query:        newQuery(),
	}
	p.run()
	return p.query, p.err
}

Some files were not shown because too many files have changed in this diff Show More