Use the newest go vet
The newest go vet based on go/analysis
This commit is contained in:
parent 7289a90245
commit 8c1237b667
85 changed files with 6716 additions and 4595 deletions
@@ -81,6 +81,15 @@ linters-settings:
  govet:
    # report about shadowed variables
    check-shadowing: true

    # settings per analyzer
    settings:
      printf: # analyzer name, run `go tool vet help` to see all analyzers
        funcs: # run `go tool vet help printf` to see available settings for `printf` analyzer
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf
  golint:
    # minimal confidence for issues, default is 0.8
    min-confidence: 0.8
@@ -1,6 +1,13 @@
linters-settings:
  govet:
    check-shadowing: true
    settings:
      printf:
        funcs:
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf
  golint:
    min-confidence: 0
  gocyclo:

@@ -42,6 +49,7 @@ linters:
run:
  skip-dirs:
    - test/testdata_etc
    - pkg/golinters/goanalysis/(checker|passes)

issues:
  exclude-rules:
4 Makefile
@@ -1,8 +1,8 @@
test:
	go build -o golangci-lint ./cmd/golangci-lint
	GL_TEST_RUN=1 ./golangci-lint run -v
	GL_TEST_RUN=1 ./golangci-lint run --fast --no-config -v --skip-dirs test/testdata_etc
	GL_TEST_RUN=1 ./golangci-lint run --no-config -v --skip-dirs test/testdata_etc
	GL_TEST_RUN=1 ./golangci-lint run --fast --no-config -v --skip-dirs 'test/testdata_etc,pkg/golinters/goanalysis/(checker|passes)'
	GL_TEST_RUN=1 ./golangci-lint run --no-config -v --skip-dirs 'test/testdata_etc,pkg/golinters/goanalysis/(checker|passes)'
	GL_TEST_RUN=1 go test -v ./...

test_race:
19 README.md
@@ -185,7 +185,7 @@ GolangCI-Lint can be used with zero configuration. By default the following lint
```bash
$ golangci-lint help linters
Enabled by default linters:
govet (vet, vetshadow): Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string [fast: true, auto-fix: false]
govet (vet, vetshadow): Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string [fast: false, auto-fix: false]
errcheck: Errcheck is a program for checking for unchecked errors in go programs. These unchecked errors can be critical bugs in some cases [fast: true, auto-fix: false]
staticcheck: Staticcheck is a go vet on steroids, applying a ton of static analysis checks [fast: false, auto-fix: false]
unused: Checks Go code for unused constants, variables, functions and types [fast: false, auto-fix: false]

@@ -612,6 +612,15 @@ linters-settings:
  govet:
    # report about shadowed variables
    check-shadowing: true

    # settings per analyzer
    settings:
      printf: # analyzer name, run `go tool vet help` to see all analyzers
        funcs: # run `go tool vet help printf` to see available settings for `printf` analyzer
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf
  golint:
    # minimal confidence for issues, default is 0.8
    min-confidence: 0.8

@@ -785,6 +794,13 @@ than the default and have more strict settings:
linters-settings:
  govet:
    check-shadowing: true
    settings:
      printf:
        funcs:
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf
          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf
  golint:
    min-confidence: 0
  gocyclo:

@@ -826,6 +842,7 @@ linters:
run:
  skip-dirs:
    - test/testdata_etc
    - pkg/golinters/goanalysis/(checker|passes)

issues:
  exclude-rules:
3 go.mod
@@ -19,7 +19,6 @@ require (
	github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee
	github.com/golangci/gofmt v0.0.0-20181105071733-0b8337e80d98
	github.com/golangci/gosec v0.0.0-20180901114220-66fb7fc33547
	github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190
	github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde
	github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1
	github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca
@@ -52,7 +51,7 @@ require (
	golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a // indirect
	golang.org/x/net v0.0.0-20190313220215-9f648a60d977 // indirect
	golang.org/x/sys v0.0.0-20190312061237-fead79001313 // indirect
	golang.org/x/tools v0.0.0-20190314010720-1286b2016bb1
	golang.org/x/tools v0.0.0-20190314010720-f0bfdbff1f9c
	gopkg.in/airbrake/gobrake.v2 v2.0.9 // indirect
	gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2 // indirect
	gopkg.in/yaml.v2 v2.2.1
10
go.sum
10
go.sum
|
@ -58,8 +58,6 @@ github.com/golangci/gofmt v0.0.0-20181105071733-0b8337e80d98 h1:ir6/L2ZOJfFrJlOT
|
|||
github.com/golangci/gofmt v0.0.0-20181105071733-0b8337e80d98/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU=
|
||||
github.com/golangci/gosec v0.0.0-20180901114220-66fb7fc33547 h1:qMomh8bv+kDazm1dSLZ9S3zZ2PJZMHL4ilfBjxFOlmI=
|
||||
github.com/golangci/gosec v0.0.0-20180901114220-66fb7fc33547/go.mod h1:0qUabqiIQgfmlAmulqxyiGkkyF6/tOGSnY2cnPVwrzU=
|
||||
github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190 h1:SLIgprnxQNjBpkz55PK1vfb64/gKU/TgVi0obFw8Lec=
|
||||
github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190/go.mod h1:pPwb+AK755h3/r73avHz5bEN6sa51/2HEZlLaV53hCo=
|
||||
github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde h1:qEGp3ZF1Qw6TkbWKn6GdJ12Ssu/CpJBaBcJ4hrUjrSo=
|
||||
github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde/go.mod h1:e5tpTHCfVze+7EpLEozzMB3eafxo2KT5veNg1k6byQU=
|
||||
github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1 h1:PHK2kIh21Zt4IcG0bBRzQwEDVKF64LnkoSXnm8lfJUk=
|
||||
|
@ -146,8 +144,6 @@ github.com/spf13/viper v1.0.2 h1:Ncr3ZIuJn322w2k1qmzXDnkLAdQMlJqBa9kfAH+irso=
|
|||
github.com/spf13/viper v1.0.2/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM=
|
||||
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
golang.org/x/crypto v0.0.0-20180505025534-4ec37c66abab h1:w4c/LoOA2vE8SYwh8wEEQVRUwpph7TtcjH7AtZvOjy0=
|
||||
golang.org/x/crypto v0.0.0-20180505025534-4ec37c66abab/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a h1:YX8ljsm6wXlHZO+aRz9Exqr0evNhKRNe5K/gi+zKh4U=
|
||||
golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
|
@ -173,10 +169,8 @@ golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGm
|
|||
golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20181205014116-22934f0fdb62/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190121143147-24cd39ecf745/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190125232054-379209517ffe h1:ZJ3JgA0fnPnX6nSjHp3y5XWNUf3zaTbWlilINJoPFkQ=
|
||||
golang.org/x/tools v0.0.0-20190125232054-379209517ffe/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190314010720-1286b2016bb1 h1:bVqQ31OV9908eHt6CNzw09jKOqi34qHsp4nsinaxiuw=
|
||||
golang.org/x/tools v0.0.0-20190314010720-1286b2016bb1/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190314010720-f0bfdbff1f9c h1:nE2ID2IbO0sUUG/3vWMz0LStAvkaW9wpnFp/65bxJw8=
|
||||
golang.org/x/tools v0.0.0-20190314010720-f0bfdbff1f9c/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
gopkg.in/airbrake/gobrake.v2 v2.0.9 h1:7z2uVWwn7oVeeugY1DtlPAy5H+KYgB1KeKTnqjNatLo=
|
||||
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
|
@@ -39,7 +39,7 @@ func NewExecutor(version, commit, date string) *Executor {
version: version,
commit: commit,
date: date,
DBManager: lintersdb.NewManager(),
DBManager: lintersdb.NewManager(nil),
}

e.log = report.NewLogWrapper(logutils.NewStderrLog(""), &e.reportData)

@@ -82,6 +82,9 @@ func NewExecutor(version, commit, date string) *Executor {
e.log.Fatalf("Can't read config: %s", err)
}

// recreate after getting config
e.DBManager = lintersdb.NewManager(e.cfg)

e.cfg.LintersSettings.Gocritic.InferEnabledChecks(e.log)
if err := e.cfg.LintersSettings.Gocritic.Validate(e.log); err != nil {
e.log.Fatalf("Invalid gocritic settings: %s", err)
@@ -278,7 +278,8 @@ func (e *Executor) runAnalysis(ctx context.Context, args []string) (<-chan resul
}
lintCtx.Log = e.log.Child("linters context")

runner, err := lint.NewRunner(lintCtx.ASTCache, e.cfg, e.log.Child("runner"), e.goenv, e.lineCache)
runner, err := lint.NewRunner(lintCtx.ASTCache, e.cfg, e.log.Child("runner"),
e.goenv, e.lineCache, e.DBManager)
if err != nil {
return nil, err
}
@@ -121,9 +121,7 @@ type Run struct {
}

type LintersSettings struct {
Govet struct {
CheckShadowing bool `mapstructure:"check-shadowing"`
}
Govet GovetSettings
Golint struct {
MinConfidence float64 `mapstructure:"min-confidence"`
}

@@ -173,6 +171,11 @@ type LintersSettings struct {
Gocritic GocriticSettings
}

type GovetSettings struct {
CheckShadowing bool `mapstructure:"check-shadowing"`
Settings map[string]map[string]interface{}
}

type ErrcheckSettings struct {
CheckTypeAssertions bool `mapstructure:"check-type-assertions"`
CheckAssignToBlank bool `mapstructure:"check-blank"`
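For orientation, a minimal sketch (not part of this commit, the program and variable names are hypothetical) of the Go value that the example `govet` block from the README above would decode into via the new `GovetSettings` struct: the `Settings` keys are analyzer names and the inner keys are those analyzers' flag names.

```go
package main

import (
	"fmt"

	"github.com/golangci/golangci-lint/pkg/config"
)

func main() {
	// Equivalent of the YAML example earlier in this diff:
	//   govet:
	//     check-shadowing: true
	//     settings:
	//       printf:
	//         funcs: [ ... ]
	govet := config.GovetSettings{
		CheckShadowing: true,
		Settings: map[string]map[string]interface{}{
			"printf": {
				"funcs": []interface{}{
					"(github.com/golangci/golangci-lint/pkg/logutils.Log).Infof",
					"(github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf",
				},
			},
		},
	}
	fmt.Printf("%+v\n", govet)
}
```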
137 pkg/golinters/goanalysis/linter.go (new file)
@@ -0,0 +1,137 @@
package goanalysis

import (
	"context"
	"flag"
	"fmt"
	"strings"

	"github.com/pkg/errors"
	"golang.org/x/tools/go/analysis"

	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/checker"
	"github.com/golangci/golangci-lint/pkg/lint/linter"
	"github.com/golangci/golangci-lint/pkg/result"
)

type Linter struct {
	name, desc string
	analyzers  []*analysis.Analyzer
	cfg        map[string]map[string]interface{}
}

func NewLinter(name, desc string, analyzers []*analysis.Analyzer, cfg map[string]map[string]interface{}) *Linter {
	return &Linter{name: name, desc: desc, analyzers: analyzers, cfg: cfg}
}

func (lnt Linter) Name() string {
	return lnt.name
}

func (lnt Linter) Desc() string {
	return lnt.desc
}

func (lnt Linter) allAnalyzerNames() []string {
	var ret []string
	for _, a := range lnt.analyzers {
		ret = append(ret, a.Name)
	}
	return ret
}

func allFlagNames(fs *flag.FlagSet) []string {
	var ret []string
	fs.VisitAll(func(f *flag.Flag) {
		ret = append(ret, f.Name)
	})
	return ret
}

func valueToString(v interface{}) string {
	if ss, ok := v.([]string); ok {
		return strings.Join(ss, ",")
	}

	if is, ok := v.([]interface{}); ok {
		var ss []string
		for _, i := range is {
			ss = append(ss, fmt.Sprint(i))
		}
		return valueToString(ss)
	}

	return fmt.Sprint(v)
}

func (lnt Linter) configureAnalyzer(a *analysis.Analyzer, cfg map[string]interface{}) error {
	for k, v := range cfg {
		f := a.Flags.Lookup(k)
		if f == nil {
			validFlagNames := allFlagNames(&a.Flags)
			if len(validFlagNames) == 0 {
				return fmt.Errorf("analyzer doesn't have settings")
			}

			return fmt.Errorf("analyzer doesn't have setting %q, valid settings: %v",
				k, validFlagNames)
		}

		if err := f.Value.Set(valueToString(v)); err != nil {
			return errors.Wrapf(err, "failed to set analyzer setting %q with value %v", k, v)
		}
	}

	return nil
}

func (lnt Linter) configure() error {
	analyzersMap := map[string]*analysis.Analyzer{}
	for _, a := range lnt.analyzers {
		analyzersMap[a.Name] = a
	}

	for analyzerName, analyzerSettings := range lnt.cfg {
		a := analyzersMap[analyzerName]
		if a == nil {
			return fmt.Errorf("settings key %q must be valid analyzer name, valid analyzers: %v",
				analyzerName, lnt.allAnalyzerNames())
		}

		if err := lnt.configureAnalyzer(a, analyzerSettings); err != nil {
			return errors.Wrapf(err, "failed to configure analyzer %s", analyzerName)
		}
	}

	return nil
}

func (lnt Linter) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) {
	if err := analysis.Validate(lnt.analyzers); err != nil {
		return nil, errors.Wrap(err, "failed to validate analyzers")
	}

	if err := lnt.configure(); err != nil {
		return nil, errors.Wrap(err, "failed to configure analyzers")
	}

	diags, errs := checker.Run(lnt.analyzers, lintCtx.Packages)
	for i := 1; i < len(errs); i++ {
		lintCtx.Log.Warnf("%s error: %s", lnt.Name(), errs[i])
	}
	if len(errs) != 0 {
		return nil, errs[0]
	}

	var issues []result.Issue
	for _, diag := range diags {
		i := result.Issue{
			FromLinter: lnt.Name(),
			Text:       fmt.Sprintf("%s: %s", diag.AnalyzerName, diag.Message),
			Pos:        diag.Position,
		}
		issues = append(issues, i)
	}

	return issues, nil
}
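A minimal sketch of how the new `goanalysis.Linter` above is meant to be wired up; this is not code from the commit (the linter name and the single-analyzer list are illustrative), but the `NewGovet` function later in this diff builds the govet linter the same way with the full vet analyzer suite. The settings map keys must match analyzer names and the inner keys must match the analyzers' registered flags.

```go
package main

import (
	"fmt"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/printf"

	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
)

func main() {
	// Hypothetical single-analyzer linter: configures only the printf analyzer.
	settings := map[string]map[string]interface{}{
		"printf": { // analyzer name, see `go tool vet help`
			"funcs": []interface{}{ // printf analyzer flag, see `go tool vet help printf`
				"(github.com/golangci/golangci-lint/pkg/logutils.Log).Infof",
				"(github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf",
			},
		},
	}

	lnt := goanalysis.NewLinter(
		"printfcheck", // hypothetical name
		"checks consistency of Printf-style calls",
		[]*analysis.Analyzer{printf.Analyzer},
		settings,
	)
	// The settings are applied to the analyzers' flags inside Run via configure().
	fmt.Println(lnt.Name(), "-", lnt.Desc())
}
```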
275
pkg/golinters/goanalysis/passes/nilness/nilness.go
Normal file
275
pkg/golinters/goanalysis/passes/nilness/nilness.go
Normal file
|
@ -0,0 +1,275 @@
|
|||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package nilness inspects the control-flow graph of an SSA function
|
||||
// and reports errors such as nil pointer dereferences and degenerate
|
||||
// nil pointer comparisons.
|
||||
|
||||
// This is a copy of https://github.com/golang/tools/blob/master/go/analysis/passes/nilness/nilness.go
|
||||
// from the commit f0bfdbff1f9c986484a9f02fc198b1efcfe76ebe.
|
||||
// Can't use the original one because of https://github.com/golang/go/issues/29612
|
||||
package nilness
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/token"
|
||||
"go/types"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/buildssa"
|
||||
"golang.org/x/tools/go/ssa"
|
||||
)
|
||||
|
||||
const Doc = `check for redundant or impossible nil comparisons
|
||||
|
||||
The nilness checker inspects the control-flow graph of each function in
|
||||
a package and reports nil pointer dereferences and degenerate nil
|
||||
pointers. A degenerate comparison is of the form x==nil or x!=nil where x
|
||||
is statically known to be nil or non-nil. These are often a mistake,
|
||||
especially in control flow related to errors.
|
||||
|
||||
This check reports conditions such as:
|
||||
|
||||
if f == nil { // impossible condition (f is a function)
|
||||
}
|
||||
|
||||
and:
|
||||
|
||||
p := &v
|
||||
...
|
||||
if p != nil { // tautological condition
|
||||
}
|
||||
|
||||
and:
|
||||
|
||||
if p == nil {
|
||||
print(*p) // nil dereference
|
||||
}
|
||||
`
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "nilness",
|
||||
Doc: Doc,
|
||||
Run: run,
|
||||
Requires: []*analysis.Analyzer{buildssa.Analyzer},
|
||||
}
|
||||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
|
||||
for _, fn := range ssainput.SrcFuncs {
|
||||
runFunc(pass, fn)
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func runFunc(pass *analysis.Pass, fn *ssa.Function) {
|
||||
reportf := func(category string, pos token.Pos, format string, args ...interface{}) {
|
||||
pass.Report(analysis.Diagnostic{
|
||||
Pos: pos,
|
||||
Category: category,
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
})
|
||||
}
|
||||
|
||||
// notNil reports an error if v is provably nil.
|
||||
notNil := func(stack []fact, instr ssa.Instruction, v ssa.Value, descr string) {
|
||||
if nilnessOf(stack, v) == isnil {
|
||||
reportf("nilderef", instr.Pos(), "nil dereference in "+descr)
|
||||
}
|
||||
}
|
||||
|
||||
// visit visits reachable blocks of the CFG in dominance order,
|
||||
// maintaining a stack of dominating nilness facts.
|
||||
//
|
||||
// By traversing the dom tree, we can pop facts off the stack as
|
||||
// soon as we've visited a subtree. Had we traversed the CFG,
|
||||
// we would need to retain the set of facts for each block.
|
||||
seen := make([]bool, len(fn.Blocks)) // seen[i] means visit should ignore block i
|
||||
var visit func(b *ssa.BasicBlock, stack []fact)
|
||||
visit = func(b *ssa.BasicBlock, stack []fact) {
|
||||
if seen[b.Index] {
|
||||
return
|
||||
}
|
||||
seen[b.Index] = true
|
||||
|
||||
// Report nil dereferences.
|
||||
for _, instr := range b.Instrs {
|
||||
switch instr := instr.(type) {
|
||||
case ssa.CallInstruction:
|
||||
notNil(stack, instr, instr.Common().Value,
|
||||
instr.Common().Description())
|
||||
case *ssa.FieldAddr:
|
||||
notNil(stack, instr, instr.X, "field selection")
|
||||
case *ssa.IndexAddr:
|
||||
notNil(stack, instr, instr.X, "index operation")
|
||||
case *ssa.MapUpdate:
|
||||
notNil(stack, instr, instr.Map, "map update")
|
||||
case *ssa.Slice:
|
||||
// A nilcheck occurs in ptr[:] iff ptr is a pointer to an array.
|
||||
if _, ok := instr.X.Type().Underlying().(*types.Pointer); ok {
|
||||
notNil(stack, instr, instr.X, "slice operation")
|
||||
}
|
||||
case *ssa.Store:
|
||||
notNil(stack, instr, instr.Addr, "store")
|
||||
case *ssa.TypeAssert:
|
||||
notNil(stack, instr, instr.X, "type assertion")
|
||||
case *ssa.UnOp:
|
||||
if instr.Op == token.MUL { // *X
|
||||
notNil(stack, instr, instr.X, "load")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For nil comparison blocks, report an error if the condition
|
||||
// is degenerate, and push a nilness fact on the stack when
|
||||
// visiting its true and false successor blocks.
|
||||
if binop, tsucc, fsucc := eq(b); binop != nil {
|
||||
xnil := nilnessOf(stack, binop.X)
|
||||
ynil := nilnessOf(stack, binop.Y)
|
||||
|
||||
if ynil != unknown && xnil != unknown && (xnil == isnil || ynil == isnil) {
|
||||
// Degenerate condition:
|
||||
// the nilness of both operands is known,
|
||||
// and at least one of them is nil.
|
||||
var adj string
|
||||
if (xnil == ynil) == (binop.Op == token.EQL) {
|
||||
adj = "tautological"
|
||||
} else {
|
||||
adj = "impossible"
|
||||
}
|
||||
reportf("cond", binop.Pos(), "%s condition: %s %s %s", adj, xnil, binop.Op, ynil)
|
||||
|
||||
// If tsucc's or fsucc's sole incoming edge is impossible,
|
||||
// it is unreachable. Prune traversal of it and
|
||||
// all the blocks it dominates.
|
||||
// (We could be more precise with full dataflow
|
||||
// analysis of control-flow joins.)
|
||||
var skip *ssa.BasicBlock
|
||||
if xnil == ynil {
|
||||
skip = fsucc
|
||||
} else {
|
||||
skip = tsucc
|
||||
}
|
||||
for _, d := range b.Dominees() {
|
||||
if d == skip && len(d.Preds) == 1 {
|
||||
continue
|
||||
}
|
||||
visit(d, stack)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// "if x == nil" or "if nil == y" condition; x, y are unknown.
|
||||
if xnil == isnil || ynil == isnil {
|
||||
var f fact
|
||||
if xnil == isnil {
|
||||
// x is nil, y is unknown:
|
||||
// t successor learns y is nil.
|
||||
f = fact{binop.Y, isnil}
|
||||
} else {
|
||||
// x is nil, y is unknown:
|
||||
// t successor learns x is nil.
|
||||
f = fact{binop.X, isnil}
|
||||
}
|
||||
|
||||
for _, d := range b.Dominees() {
|
||||
// Successor blocks learn a fact
|
||||
// only at non-critical edges.
|
||||
// (We could do be more precise with full dataflow
|
||||
// analysis of control-flow joins.)
|
||||
s := stack
|
||||
if len(d.Preds) == 1 {
|
||||
if d == tsucc {
|
||||
s = append(s, f)
|
||||
} else if d == fsucc {
|
||||
s = append(s, f.negate())
|
||||
}
|
||||
}
|
||||
visit(d, s)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
for _, d := range b.Dominees() {
|
||||
visit(d, stack)
|
||||
}
|
||||
}
|
||||
|
||||
// Visit the entry block. No need to visit fn.Recover.
|
||||
if fn.Blocks != nil {
|
||||
visit(fn.Blocks[0], make([]fact, 0, 20)) // 20 is plenty
|
||||
}
|
||||
}
|
||||
|
||||
// A fact records that a block is dominated
|
||||
// by the condition v == nil or v != nil.
|
||||
type fact struct {
|
||||
value ssa.Value
|
||||
nilness nilness
|
||||
}
|
||||
|
||||
func (f fact) negate() fact { return fact{f.value, -f.nilness} }
|
||||
|
||||
type nilness int
|
||||
|
||||
const (
|
||||
isnonnil = -1
|
||||
unknown nilness = 0
|
||||
isnil = 1
|
||||
)
|
||||
|
||||
var nilnessStrings = []string{"non-nil", "unknown", "nil"}
|
||||
|
||||
func (n nilness) String() string { return nilnessStrings[n+1] }
|
||||
|
||||
// nilnessOf reports whether v is definitely nil, definitely not nil,
|
||||
// or unknown given the dominating stack of facts.
|
||||
func nilnessOf(stack []fact, v ssa.Value) nilness {
|
||||
// Is value intrinsically nil or non-nil?
|
||||
switch v := v.(type) {
|
||||
case *ssa.Alloc,
|
||||
*ssa.FieldAddr,
|
||||
*ssa.FreeVar,
|
||||
*ssa.Function,
|
||||
*ssa.Global,
|
||||
*ssa.IndexAddr,
|
||||
*ssa.MakeChan,
|
||||
*ssa.MakeClosure,
|
||||
*ssa.MakeInterface,
|
||||
*ssa.MakeMap,
|
||||
*ssa.MakeSlice:
|
||||
return isnonnil
|
||||
case *ssa.Const:
|
||||
if v.IsNil() {
|
||||
return isnil
|
||||
} else {
|
||||
return isnonnil
|
||||
}
|
||||
}
|
||||
|
||||
// Search dominating control-flow facts.
|
||||
for _, f := range stack {
|
||||
if f.value == v {
|
||||
return f.nilness
|
||||
}
|
||||
}
|
||||
return unknown
|
||||
}
|
||||
|
||||
// If b ends with an equality comparison, eq returns the operation and
|
||||
// its true (equal) and false (not equal) successors.
|
||||
func eq(b *ssa.BasicBlock) (op *ssa.BinOp, tsucc, fsucc *ssa.BasicBlock) {
|
||||
if If, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If); ok {
|
||||
if binop, ok := If.Cond.(*ssa.BinOp); ok {
|
||||
switch binop.Op {
|
||||
case token.EQL:
|
||||
return binop, b.Succs[0], b.Succs[1]
|
||||
case token.NEQ:
|
||||
return binop, b.Succs[1], b.Succs[0]
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil, nil
|
||||
}
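To make the vendored analyzer above concrete, here is a small hypothetical example of code it reports, taken from the patterns described in its Doc string; the comments paraphrase the diagnostics rather than quoting exact output.

```go
package main

import "fmt"

func printIfNil(p *int) {
	if p == nil {
		fmt.Println(*p) // nilness: nil dereference in load (p is known to be nil here)
	}
}

func main() {
	v := 42
	q := &v
	if q != nil { // nilness: tautological condition (q is statically known to be non-nil)
		printIfNil(nil)
	}
}
```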
|
|
@ -1,70 +1,87 @@
|
|||
package golinters
|
||||
|
||||
import (
|
||||
"context"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"golang.org/x/tools/go/analysis"
|
||||
|
||||
govetAPI "github.com/golangci/govet"
|
||||
"github.com/golangci/golangci-lint/pkg/config"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/fsutils"
|
||||
"github.com/golangci/golangci-lint/pkg/lint/linter"
|
||||
"github.com/golangci/golangci-lint/pkg/result"
|
||||
"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
|
||||
|
||||
// analysis plug-ins
|
||||
"golang.org/x/tools/go/analysis/passes/asmdecl"
|
||||
"golang.org/x/tools/go/analysis/passes/assign"
|
||||
"golang.org/x/tools/go/analysis/passes/atomic"
|
||||
"golang.org/x/tools/go/analysis/passes/atomicalign"
|
||||
"golang.org/x/tools/go/analysis/passes/bools"
|
||||
"golang.org/x/tools/go/analysis/passes/buildtag"
|
||||
"golang.org/x/tools/go/analysis/passes/cgocall"
|
||||
"golang.org/x/tools/go/analysis/passes/composite"
|
||||
"golang.org/x/tools/go/analysis/passes/copylock"
|
||||
"golang.org/x/tools/go/analysis/passes/httpresponse"
|
||||
"golang.org/x/tools/go/analysis/passes/loopclosure"
|
||||
"golang.org/x/tools/go/analysis/passes/lostcancel"
|
||||
"golang.org/x/tools/go/analysis/passes/nilfunc"
|
||||
"golang.org/x/tools/go/analysis/passes/printf"
|
||||
"golang.org/x/tools/go/analysis/passes/shadow"
|
||||
"golang.org/x/tools/go/analysis/passes/shift"
|
||||
"golang.org/x/tools/go/analysis/passes/stdmethods"
|
||||
"golang.org/x/tools/go/analysis/passes/structtag"
|
||||
"golang.org/x/tools/go/analysis/passes/tests"
|
||||
"golang.org/x/tools/go/analysis/passes/unmarshal"
|
||||
"golang.org/x/tools/go/analysis/passes/unreachable"
|
||||
"golang.org/x/tools/go/analysis/passes/unsafeptr"
|
||||
"golang.org/x/tools/go/analysis/passes/unusedresult"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/passes/nilness"
|
||||
)
|
||||
|
||||
type Govet struct{}
|
||||
func NewGovet(cfg *config.GovetSettings) *goanalysis.Linter {
|
||||
analyzers := []*analysis.Analyzer{
|
||||
// the traditional vet suite:
|
||||
asmdecl.Analyzer,
|
||||
assign.Analyzer,
|
||||
atomic.Analyzer,
|
||||
atomicalign.Analyzer,
|
||||
bools.Analyzer,
|
||||
buildtag.Analyzer,
|
||||
cgocall.Analyzer,
|
||||
composite.Analyzer,
|
||||
copylock.Analyzer,
|
||||
httpresponse.Analyzer,
|
||||
loopclosure.Analyzer,
|
||||
lostcancel.Analyzer,
|
||||
nilfunc.Analyzer,
|
||||
printf.Analyzer,
|
||||
shift.Analyzer,
|
||||
stdmethods.Analyzer,
|
||||
structtag.Analyzer,
|
||||
tests.Analyzer,
|
||||
unmarshal.Analyzer,
|
||||
unreachable.Analyzer,
|
||||
unsafeptr.Analyzer,
|
||||
unusedresult.Analyzer,
|
||||
|
||||
func (Govet) Name() string {
|
||||
return "govet"
|
||||
}
|
||||
// for debugging:
|
||||
// findcall.Analyzer,
|
||||
// pkgfact.Analyzer,
|
||||
|
||||
func (Govet) Desc() string {
|
||||
return "Vet examines Go source code and reports suspicious constructs, " +
|
||||
"such as Printf calls whose arguments do not align with the format string"
|
||||
}
|
||||
|
||||
func (g Govet) Run(_ context.Context, lintCtx *linter.Context) ([]result.Issue, error) {
|
||||
var govetIssues []govetAPI.Issue
|
||||
var err error
|
||||
govetIssues, err = g.runImpl(lintCtx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
// uses SSA:
|
||||
nilness.Analyzer,
|
||||
}
|
||||
|
||||
if len(govetIssues) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
res := make([]result.Issue, 0, len(govetIssues))
|
||||
for _, i := range govetIssues {
|
||||
res = append(res, result.Issue{
|
||||
Pos: i.Pos,
|
||||
Text: i.Message,
|
||||
FromLinter: g.Name(),
|
||||
})
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (g Govet) runImpl(lintCtx *linter.Context) ([]govetAPI.Issue, error) {
|
||||
// TODO: check .S asm files: govet can do it if pass dirs
|
||||
var govetIssues []govetAPI.Issue
|
||||
for _, pkg := range lintCtx.Program.InitialPackages() {
|
||||
if len(pkg.Files) == 0 {
|
||||
continue
|
||||
var settings map[string]map[string]interface{}
|
||||
if cfg != nil {
|
||||
if cfg.CheckShadowing {
|
||||
analyzers = append(analyzers, shadow.Analyzer)
|
||||
}
|
||||
|
||||
issues, err := govetAPI.Analyze(pkg.Files, lintCtx.Program.Fset, pkg,
|
||||
lintCtx.Settings().Govet.CheckShadowing, getPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
govetIssues = append(govetIssues, issues...)
|
||||
settings = cfg.Settings
|
||||
}
|
||||
|
||||
return govetIssues, nil
|
||||
}
|
||||
|
||||
func getPath(f *ast.File, fset *token.FileSet) (string, error) {
|
||||
return fsutils.ShortestRelPath(fset.Position(f.Pos()).Filename, "")
|
||||
return goanalysis.NewLinter(
|
||||
"govet",
|
||||
"Vet examines Go source code and reports suspicious constructs, "+
|
||||
"such as Printf calls whose arguments do not align with the format string",
|
||||
analyzers,
|
||||
settings,
|
||||
)
|
||||
}
|
||||
|
|
|
@ -91,7 +91,7 @@ func TestGetEnabledLintersSet(t *testing.T) {
|
|||
},
|
||||
}
|
||||
|
||||
m := NewManager()
|
||||
m := NewManager(nil)
|
||||
es := NewEnabledSet(m, NewValidator(m), nil, nil)
|
||||
for _, c := range cases {
|
||||
c := c
|
||||
|
|
|
@ -3,16 +3,19 @@ package lintersdb
|
|||
import (
|
||||
"os"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/config"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/golinters"
|
||||
"github.com/golangci/golangci-lint/pkg/lint/linter"
|
||||
)
|
||||
|
||||
type Manager struct {
|
||||
nameToLC map[string]*linter.Config
|
||||
cfg *config.Config
|
||||
}
|
||||
|
||||
func NewManager() *Manager {
|
||||
m := &Manager{}
|
||||
func NewManager(cfg *config.Config) *Manager {
|
||||
m := &Manager{cfg: cfg}
|
||||
nameToLC := make(map[string]*linter.Config)
|
||||
for _, lc := range m.GetAllSupportedLinterConfigs() {
|
||||
for _, name := range lc.AllNames() {
|
||||
|
@ -74,10 +77,14 @@ func (Manager) GetMetaLinters() map[string]linter.MetaLinter {
|
|||
return ret
|
||||
}
|
||||
|
||||
func (Manager) GetAllSupportedLinterConfigs() []*linter.Config {
|
||||
func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config {
|
||||
var govetCfg *config.GovetSettings
|
||||
if m.cfg != nil {
|
||||
govetCfg = &m.cfg.LintersSettings.Govet
|
||||
}
|
||||
lcs := []*linter.Config{
|
||||
linter.NewConfig(golinters.Govet{}).
|
||||
WithTypeInfo().
|
||||
linter.NewConfig(golinters.NewGovet(govetCfg)).
|
||||
WithSSA(). // TODO: extract from the linter config and don't build SSA, just use LoadAllSyntax mode
|
||||
WithPresets(linter.PresetBugs).
|
||||
WithSpeed(4).
|
||||
WithAlternativeNames("vet", "vetshadow").
|
||||
|
@ -229,7 +236,7 @@ func (Manager) GetAllSupportedLinterConfigs() []*linter.Config {
|
|||
|
||||
isLocalRun := os.Getenv("GOLANGCI_COM_RUN") == ""
|
||||
enabledByDefault := map[string]bool{
|
||||
golinters.Govet{}.Name(): true,
|
||||
golinters.NewGovet(nil).Name(): true,
|
||||
golinters.Errcheck{}.Name(): true,
|
||||
golinters.Staticcheck{}.Name(): true,
|
||||
golinters.Unused{}.Name(): true,
|
||||
|
|
|
@ -9,6 +9,8 @@ import (
|
|||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/fsutils"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/config"
|
||||
|
@ -28,7 +30,7 @@ type Runner struct {
|
|||
}
|
||||
|
||||
func NewRunner(astCache *astcache.Cache, cfg *config.Config, log logutils.Log, goenv *goutil.Env,
|
||||
lineCache *fsutils.LineCache) (*Runner, error) {
|
||||
lineCache *fsutils.LineCache, dbManager *lintersdb.Manager) (*Runner, error) {
|
||||
|
||||
icfg := cfg.Issues
|
||||
excludePatterns := icfg.ExcludePatterns
|
||||
|
@ -74,7 +76,7 @@ func NewRunner(astCache *astcache.Cache, cfg *config.Config, log logutils.Log, g
|
|||
processors.NewIdentifierMarker(), // must be befor exclude
|
||||
processors.NewExclude(excludeTotalPattern),
|
||||
processors.NewExcludeRules(excludeRules, lineCache, log.Child("exclude_rules")),
|
||||
processors.NewNolint(astCache, log.Child("nolint")),
|
||||
processors.NewNolint(astCache, log.Child("nolint"), dbManager),
|
||||
|
||||
processors.NewUniqByLine(),
|
||||
processors.NewDiff(icfg.Diff, icfg.DiffFromRevision, icfg.DiffPatchFilePath),
|
||||
|
|
|
@ -14,8 +14,8 @@ type Replacement struct {
|
|||
type Issue struct {
|
||||
FromLinter string
|
||||
Text string
|
||||
Pos token.Position
|
||||
|
||||
Pos token.Position
|
||||
LineRange *Range `json:",omitempty"`
|
||||
|
||||
// HunkPos is used only when golangci-lint is run over a diff
|
||||
|
|
|
@ -27,8 +27,8 @@ var replacePatterns = []replacePattern{
|
|||
{`^(\S+) can be (\S+)$`, "`${1}` can be `${2}`"},
|
||||
|
||||
// govet
|
||||
{`^(\S+) arg list ends with redundant newline$`, "`${1}` arg list ends with redundant newline"},
|
||||
{`^(\S+) composite literal uses unkeyed fields$`, "`${1}` composite literal uses unkeyed fields"},
|
||||
{`^printf: (\S+) arg list ends with redundant newline$`, "printf: `${1}` arg list ends with redundant newline"},
|
||||
{`^composites: (\S+) composite literal uses unkeyed fields$`, "composites: `${1}` composite literal uses unkeyed fields"},
|
||||
|
||||
// gosec
|
||||
{`^(\S+): Blacklisted import (\S+): weak cryptographic primitive$`,
|
||||
|
|
|
@ -54,11 +54,11 @@ type Nolint struct {
|
|||
unknownLintersSet map[string]bool
|
||||
}
|
||||
|
||||
func NewNolint(astCache *astcache.Cache, log logutils.Log) *Nolint {
|
||||
func NewNolint(astCache *astcache.Cache, log logutils.Log, dbManager *lintersdb.Manager) *Nolint {
|
||||
return &Nolint{
|
||||
cache: filesCache{},
|
||||
astCache: astCache,
|
||||
dbManager: lintersdb.NewManager(), // TODO: get it in constructor
|
||||
dbManager: dbManager,
|
||||
log: log,
|
||||
unknownLintersSet: map[string]bool{},
|
||||
}
|
||||
|
|
|
@ -6,6 +6,8 @@ import (
|
|||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
|
||||
|
||||
"github.com/golang/mock/gomock"
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
|
@ -36,7 +38,7 @@ func newTestNolintProcessor(log logutils.Log) *Nolint {
|
|||
filepath.Join("testdata", "nolint2.go"),
|
||||
filepath.Join("testdata", "nolint_bad_names.go"),
|
||||
)
|
||||
return NewNolint(cache, log)
|
||||
return NewNolint(cache, log, lintersdb.NewManager(nil))
|
||||
}
|
||||
|
||||
func getOkLogger(ctrl *gomock.Controller) *logutils.MockLog {
|
||||
|
|
|
@ -32,7 +32,8 @@ func (p SourceCode) Process(issues []result.Issue) ([]result.Issue, error) {
|
|||
for lineNumber := lineRange.From; lineNumber <= lineRange.To; lineNumber++ {
|
||||
line, err := p.lineCache.GetLine(i.FilePath(), lineNumber)
|
||||
if err != nil {
|
||||
p.log.Warnf("Failed to get line %d for file %s: %s", i.FilePath(), lineNumber, err)
|
||||
p.log.Warnf("Failed to get line %d for file %s: %s",
|
||||
lineNumber, i.FilePath(), err)
|
||||
return i
|
||||
}
|
||||
|
||||
|
|
|
@ -89,7 +89,7 @@ func buildTemplateContext() (map[string]interface{}, error) {
|
|||
|
||||
func getLintersListMarkdown(enabled bool) string {
|
||||
var neededLcs []*linter.Config
|
||||
lcs := lintersdb.NewManager().GetAllSupportedLinterConfigs()
|
||||
lcs := lintersdb.NewManager(nil).GetAllSupportedLinterConfigs()
|
||||
for _, lc := range lcs {
|
||||
if lc.EnabledByDefault == enabled {
|
||||
neededLcs = append(neededLcs, lc)
|
||||
|
@ -114,7 +114,7 @@ func getLintersListMarkdown(enabled bool) string {
|
|||
func getThanksList() string {
|
||||
var lines []string
|
||||
addedAuthors := map[string]bool{}
|
||||
for _, lc := range lintersdb.NewManager().GetAllSupportedLinterConfigs() {
|
||||
for _, lc := range lintersdb.NewManager(nil).GetAllSupportedLinterConfigs() {
|
||||
if lc.OriginalURL == "" {
|
||||
continue
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ func inSlice(s []string, v string) bool {
|
|||
}
|
||||
|
||||
func getEnabledByDefaultFastLintersExcept(except ...string) []string {
|
||||
m := lintersdb.NewManager()
|
||||
m := lintersdb.NewManager(nil)
|
||||
ebdl := m.GetAllEnabledByDefaultLinters()
|
||||
ret := []string{}
|
||||
for _, lc := range ebdl {
|
||||
|
@ -38,7 +38,7 @@ func getEnabledByDefaultFastLintersExcept(except ...string) []string {
|
|||
}
|
||||
|
||||
func getAllFastLintersWith(with ...string) []string {
|
||||
linters := lintersdb.NewManager().GetAllSupportedLinterConfigs()
|
||||
linters := lintersdb.NewManager(nil).GetAllSupportedLinterConfigs()
|
||||
ret := append([]string{}, with...)
|
||||
for _, lc := range linters {
|
||||
if lc.NeedsSSARepr {
|
||||
|
@ -51,7 +51,7 @@ func getAllFastLintersWith(with ...string) []string {
|
|||
}
|
||||
|
||||
func getEnabledByDefaultLinters() []string {
|
||||
ebdl := lintersdb.NewManager().GetAllEnabledByDefaultLinters()
|
||||
ebdl := lintersdb.NewManager(nil).GetAllEnabledByDefaultLinters()
|
||||
ret := []string{}
|
||||
for _, lc := range ebdl {
|
||||
ret = append(ret, lc.Name())
|
||||
|
@ -61,7 +61,7 @@ func getEnabledByDefaultLinters() []string {
|
|||
}
|
||||
|
||||
func getEnabledByDefaultFastLintersWith(with ...string) []string {
|
||||
ebdl := lintersdb.NewManager().GetAllEnabledByDefaultLinters()
|
||||
ebdl := lintersdb.NewManager(nil).GetAllEnabledByDefaultLinters()
|
||||
ret := append([]string{}, with...)
|
||||
for _, lc := range ebdl {
|
||||
if lc.NeedsSSARepr {
|
||||
|
|
|
@ -12,7 +12,7 @@ import (
|
|||
)
|
||||
|
||||
func getCommonRunArgs() []string {
|
||||
return []string{"--skip-dirs", "testdata_etc/"}
|
||||
return []string{"--skip-dirs", "testdata_etc/,pkg/golinters/goanalysis/(checker|passes)"}
|
||||
}
|
||||
|
||||
func withCommonRunArgs(args ...string) []string {
|
||||
|
|
4
test/testdata/govet.go
vendored
4
test/testdata/govet.go
vendored
|
@ -8,12 +8,12 @@ import (
|
|||
)
|
||||
|
||||
func Govet() error {
|
||||
return &os.PathError{"first", "path", os.ErrNotExist} // ERROR "`os.PathError` composite literal uses unkeyed fields"
|
||||
return &os.PathError{"first", "path", os.ErrNotExist} // ERROR "composites: `os.PathError` composite literal uses unkeyed fields"
|
||||
}
|
||||
|
||||
func GovetShadow(f io.Reader, buf []byte) (err error) {
|
||||
if f != nil {
|
||||
_, err := f.Read(buf) // ERROR "declaration of .err. shadows declaration at .*govet.go:\d+"
|
||||
_, err := f.Read(buf) // ERROR "shadow: declaration of .err. shadows declaration at line \d+"
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
27
vendor/github.com/golangci/govet/LICENSE
generated
vendored
27
vendor/github.com/golangci/govet/LICENSE
generated
vendored
|
@ -1,27 +0,0 @@
|
|||
Copyright (c) 2009 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
33
vendor/github.com/golangci/govet/README
generated
vendored
33
vendor/github.com/golangci/govet/README
generated
vendored
|
@ -1,33 +0,0 @@
|
|||
Vet is a tool that checks correctness of Go programs. It runs a suite of tests,
|
||||
each tailored to check for a particular class of errors. Examples include incorrect
|
||||
Printf format verbs and malformed build tags.
|
||||
|
||||
Over time many checks have been added to vet's suite, but many more have been
|
||||
rejected as not appropriate for the tool. The criteria applied when selecting which
|
||||
checks to add are:
|
||||
|
||||
Correctness:
|
||||
|
||||
Vet's checks are about correctness, not style. A vet check must identify real or
|
||||
potential bugs that could cause incorrect compilation or execution. A check that
|
||||
only identifies stylistic points or alternative correct approaches to a situation
|
||||
is not acceptable.
|
||||
|
||||
Frequency:
|
||||
|
||||
Vet is run every day by many programmers, often as part of every compilation or
|
||||
submission. The cost in execution time is considerable, especially in aggregate,
|
||||
so checks must be likely enough to find real problems that they are worth the
|
||||
overhead of the added check. A new check that finds only a handful of problems
|
||||
across all existing programs, even if the problem is significant, is not worth
|
||||
adding to the suite everyone runs daily.
|
||||
|
||||
Precision:
|
||||
|
||||
Most of vet's checks are heuristic and can generate both false positives (flagging
|
||||
correct programs) and false negatives (not flagging incorrect ones). The rate of
|
||||
both these failures must be very small. A check that is too noisy will be ignored
|
||||
by the programmer overwhelmed by the output; a check that misses too many of the
|
||||
cases it's looking for will give a false sense of security. Neither is acceptable.
|
||||
A vet check must be accurate enough that everything it reports is worth examining,
|
||||
and complete enough to encourage real confidence.
|
52
vendor/github.com/golangci/govet/assign.go
generated
vendored
52
vendor/github.com/golangci/govet/assign.go
generated
vendored
|
@ -1,52 +0,0 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
This file contains the code to check for useless assignments.
|
||||
*/
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("assign",
|
||||
"check for useless assignments",
|
||||
checkAssignStmt,
|
||||
assignStmt)
|
||||
}
|
||||
|
||||
// TODO: should also check for assignments to struct fields inside methods
|
||||
// that are on T instead of *T.
|
||||
|
||||
// checkAssignStmt checks for assignments of the form "<expr> = <expr>".
|
||||
// These are almost always useless, and even when they aren't they are usually a mistake.
|
||||
func checkAssignStmt(f *File, node ast.Node) {
|
||||
stmt := node.(*ast.AssignStmt)
|
||||
if stmt.Tok != token.ASSIGN {
|
||||
return // ignore :=
|
||||
}
|
||||
if len(stmt.Lhs) != len(stmt.Rhs) {
|
||||
// If LHS and RHS have different cardinality, they can't be the same.
|
||||
return
|
||||
}
|
||||
for i, lhs := range stmt.Lhs {
|
||||
rhs := stmt.Rhs[i]
|
||||
if hasSideEffects(f, lhs) || hasSideEffects(f, rhs) {
|
||||
continue // expressions may not be equal
|
||||
}
|
||||
if reflect.TypeOf(lhs) != reflect.TypeOf(rhs) {
|
||||
continue // short-circuit the heavy-weight gofmt check
|
||||
}
|
||||
le := f.gofmt(lhs)
|
||||
re := f.gofmt(rhs)
|
||||
if le == re {
|
||||
f.Badf(stmt.Pos(), "self-assignment of %s to %s", re, le)
|
||||
}
|
||||
}
|
||||
}
|
71
vendor/github.com/golangci/govet/atomic.go
generated
vendored
71
vendor/github.com/golangci/govet/atomic.go
generated
vendored
|
@ -1,71 +0,0 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("atomic",
|
||||
"check for common mistaken usages of the sync/atomic package",
|
||||
checkAtomicAssignment,
|
||||
assignStmt)
|
||||
}
|
||||
|
||||
// checkAtomicAssignment walks the assignment statement checking for common
|
||||
// mistaken usage of atomic package, such as: x = atomic.AddUint64(&x, 1)
|
||||
func checkAtomicAssignment(f *File, node ast.Node) {
|
||||
n := node.(*ast.AssignStmt)
|
||||
if len(n.Lhs) != len(n.Rhs) {
|
||||
return
|
||||
}
|
||||
if len(n.Lhs) == 1 && n.Tok == token.DEFINE {
|
||||
return
|
||||
}
|
||||
|
||||
for i, right := range n.Rhs {
|
||||
call, ok := right.(*ast.CallExpr)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
sel, ok := call.Fun.(*ast.SelectorExpr)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
pkgIdent, _ := sel.X.(*ast.Ident)
|
||||
pkgName, ok := f.pkg.uses[pkgIdent].(*types.PkgName)
|
||||
if !ok || pkgName.Imported().Path() != "sync/atomic" {
|
||||
continue
|
||||
}
|
||||
|
||||
switch sel.Sel.Name {
|
||||
case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr":
|
||||
f.checkAtomicAddAssignment(n.Lhs[i], call)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkAtomicAddAssignment walks the atomic.Add* method calls checking for assigning the return value
|
||||
// to the same variable being used in the operation
|
||||
func (f *File) checkAtomicAddAssignment(left ast.Expr, call *ast.CallExpr) {
|
||||
if len(call.Args) != 2 {
|
||||
return
|
||||
}
|
||||
arg := call.Args[0]
|
||||
broken := false
|
||||
|
||||
if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND {
|
||||
broken = f.gofmt(left) == f.gofmt(uarg.X)
|
||||
} else if star, ok := left.(*ast.StarExpr); ok {
|
||||
broken = f.gofmt(star.X) == f.gofmt(arg)
|
||||
}
|
||||
|
||||
if broken {
|
||||
f.Bad(left.Pos(), "direct assignment to atomic value")
|
||||
}
|
||||
}
|
91
vendor/github.com/golangci/govet/buildtag.go
generated
vendored
91
vendor/github.com/golangci/govet/buildtag.go
generated
vendored
|
@ -1,91 +0,0 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
var (
|
||||
nl = []byte("\n")
|
||||
slashSlash = []byte("//")
|
||||
plusBuild = []byte("+build")
|
||||
)
|
||||
|
||||
// checkBuildTag checks that build tags are in the correct location and well-formed.
|
||||
func checkBuildTag(name string, data []byte) {
|
||||
if !vet("buildtags") {
|
||||
return
|
||||
}
|
||||
lines := bytes.SplitAfter(data, nl)
|
||||
|
||||
// Determine cutpoint where +build comments are no longer valid.
|
||||
// They are valid in leading // comments in the file followed by
|
||||
// a blank line.
|
||||
var cutoff int
|
||||
for i, line := range lines {
|
||||
line = bytes.TrimSpace(line)
|
||||
if len(line) == 0 {
|
||||
cutoff = i
|
||||
continue
|
||||
}
|
||||
if bytes.HasPrefix(line, slashSlash) {
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
for i, line := range lines {
|
||||
line = bytes.TrimSpace(line)
|
||||
if !bytes.HasPrefix(line, slashSlash) {
|
||||
continue
|
||||
}
|
||||
text := bytes.TrimSpace(line[2:])
|
||||
if bytes.HasPrefix(text, plusBuild) {
|
||||
fields := bytes.Fields(text)
|
||||
if !bytes.Equal(fields[0], plusBuild) {
|
||||
// Comment is something like +buildasdf not +build.
|
||||
fmt.Fprintf(os.Stderr, "%s:%d: possible malformed +build comment\n", name, i+1)
|
||||
setExit(1)
|
||||
continue
|
||||
}
|
||||
if i >= cutoff {
|
||||
fmt.Fprintf(os.Stderr, "%s:%d: +build comment must appear before package clause and be followed by a blank line\n", name, i+1)
|
||||
setExit(1)
|
||||
continue
|
||||
}
|
||||
// Check arguments.
|
||||
Args:
|
||||
for _, arg := range fields[1:] {
|
||||
for _, elem := range strings.Split(string(arg), ",") {
|
||||
if strings.HasPrefix(elem, "!!") {
|
||||
fmt.Fprintf(os.Stderr, "%s:%d: invalid double negative in build constraint: %s\n", name, i+1, arg)
|
||||
setExit(1)
|
||||
break Args
|
||||
}
|
||||
elem = strings.TrimPrefix(elem, "!")
|
||||
for _, c := range elem {
|
||||
if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
|
||||
fmt.Fprintf(os.Stderr, "%s:%d: invalid non-alphanumeric build constraint: %s\n", name, i+1, arg)
|
||||
setExit(1)
|
||||
break Args
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
// Comment with +build but not at beginning.
|
||||
if bytes.Contains(line, plusBuild) && i < cutoff {
|
||||
fmt.Fprintf(os.Stderr, "%s:%d: possible malformed +build comment\n", name, i+1)
|
||||
setExit(1)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
141
vendor/github.com/golangci/govet/cgo.go
generated
vendored
141
vendor/github.com/golangci/govet/cgo.go
generated
vendored
|
@ -1,141 +0,0 @@
|
|||
// Copyright 2015 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Check for invalid cgo pointer passing.
|
||||
// This looks for code that uses cgo to call C code passing values
|
||||
// whose types are almost always invalid according to the cgo pointer
|
||||
// sharing rules.
|
||||
// Specifically, it warns about attempts to pass a Go chan, map, func,
|
||||
// or slice to C, either directly, or via a pointer, array, or struct.
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("cgocall",
|
||||
"check for types that may not be passed to cgo calls",
|
||||
checkCgoCall,
|
||||
callExpr)
|
||||
}
|
||||
|
||||
func checkCgoCall(f *File, node ast.Node) {
|
||||
x := node.(*ast.CallExpr)
|
||||
|
||||
// We are only looking for calls to functions imported from
|
||||
// the "C" package.
|
||||
sel, ok := x.Fun.(*ast.SelectorExpr)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
id, ok := sel.X.(*ast.Ident)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
pkgname, ok := f.pkg.uses[id].(*types.PkgName)
|
||||
if !ok || pkgname.Imported().Path() != "C" {
|
||||
return
|
||||
}
|
||||
|
||||
// A call to C.CBytes passes a pointer but is always safe.
|
||||
if sel.Sel.Name == "CBytes" {
|
||||
return
|
||||
}
|
||||
|
||||
for _, arg := range x.Args {
|
||||
if !typeOKForCgoCall(cgoBaseType(f, arg), make(map[types.Type]bool)) {
|
||||
f.Badf(arg.Pos(), "possibly passing Go type with embedded pointer to C")
|
||||
}
|
||||
|
||||
// Check for passing the address of a bad type.
|
||||
if conv, ok := arg.(*ast.CallExpr); ok && len(conv.Args) == 1 && f.hasBasicType(conv.Fun, types.UnsafePointer) {
|
||||
arg = conv.Args[0]
|
||||
}
|
||||
if u, ok := arg.(*ast.UnaryExpr); ok && u.Op == token.AND {
|
||||
if !typeOKForCgoCall(cgoBaseType(f, u.X), make(map[types.Type]bool)) {
|
||||
f.Badf(arg.Pos(), "possibly passing Go type with embedded pointer to C")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// cgoBaseType tries to look through type conversions involving
|
||||
// unsafe.Pointer to find the real type. It converts:
|
||||
// unsafe.Pointer(x) => x
|
||||
// *(*unsafe.Pointer)(unsafe.Pointer(&x)) => x
|
||||
func cgoBaseType(f *File, arg ast.Expr) types.Type {
|
||||
switch arg := arg.(type) {
|
||||
case *ast.CallExpr:
|
||||
if len(arg.Args) == 1 && f.hasBasicType(arg.Fun, types.UnsafePointer) {
|
||||
return cgoBaseType(f, arg.Args[0])
|
||||
}
|
||||
case *ast.StarExpr:
|
||||
call, ok := arg.X.(*ast.CallExpr)
|
||||
if !ok || len(call.Args) != 1 {
|
||||
break
|
||||
}
|
||||
// Here arg is *f(v).
|
||||
t := f.pkg.types[call.Fun].Type
|
||||
if t == nil {
|
||||
break
|
||||
}
|
||||
ptr, ok := t.Underlying().(*types.Pointer)
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
// Here arg is *(*p)(v)
|
||||
elem, ok := ptr.Elem().Underlying().(*types.Basic)
|
||||
if !ok || elem.Kind() != types.UnsafePointer {
|
||||
break
|
||||
}
|
||||
// Here arg is *(*unsafe.Pointer)(v)
|
||||
call, ok = call.Args[0].(*ast.CallExpr)
|
||||
if !ok || len(call.Args) != 1 {
|
||||
break
|
||||
}
|
||||
// Here arg is *(*unsafe.Pointer)(f(v))
|
||||
if !f.hasBasicType(call.Fun, types.UnsafePointer) {
|
||||
break
|
||||
}
|
||||
// Here arg is *(*unsafe.Pointer)(unsafe.Pointer(v))
|
||||
u, ok := call.Args[0].(*ast.UnaryExpr)
|
||||
if !ok || u.Op != token.AND {
|
||||
break
|
||||
}
|
||||
// Here arg is *(*unsafe.Pointer)(unsafe.Pointer(&v))
|
||||
return cgoBaseType(f, u.X)
|
||||
}
|
||||
|
||||
return f.pkg.types[arg].Type
|
||||
}
|
||||
|
||||
// typeOKForCgoCall reports whether the type of arg is OK to pass to a
|
||||
// C function using cgo. This is not true for Go types with embedded
|
||||
// pointers. m is used to avoid infinite recursion on recursive types.
|
||||
func typeOKForCgoCall(t types.Type, m map[types.Type]bool) bool {
|
||||
if t == nil || m[t] {
|
||||
return true
|
||||
}
|
||||
m[t] = true
|
||||
switch t := t.Underlying().(type) {
|
||||
case *types.Chan, *types.Map, *types.Signature, *types.Slice:
|
||||
return false
|
||||
case *types.Pointer:
|
||||
return typeOKForCgoCall(t.Elem(), m)
|
||||
case *types.Array:
|
||||
return typeOKForCgoCall(t.Elem(), m)
|
||||
case *types.Struct:
|
||||
for i := 0; i < t.NumFields(); i++ {
|
||||
if !typeOKForCgoCall(t.Field(i).Type(), m) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
94
vendor/github.com/golangci/govet/composite.go
generated
vendored
94
vendor/github.com/golangci/govet/composite.go
generated
vendored
|
@ -1,94 +0,0 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file contains the test for unkeyed struct literals.

package govet

import (
	"flag"
	"go/ast"
	"go/types"
	"strings"

	"github.com/golangci/govet/lib/whitelist"
)

var compositeWhiteList = flag.Bool("compositewhitelist", true, "use composite white list; for testing only")

func init() {
	register("composites",
		"check that composite literals used field-keyed elements",
		checkUnkeyedLiteral,
		compositeLit)
}

// checkUnkeyedLiteral checks if a composite literal is a struct literal with
// unkeyed fields.
func checkUnkeyedLiteral(f *File, node ast.Node) {
	if strings.HasSuffix(f.name, "_test.go") {
		return
	}

	cl := node.(*ast.CompositeLit)

	typ := f.pkg.types[cl].Type
	if typ == nil {
		// cannot determine composite literals' type, skip it
		return
	}
	typeName := typ.String()
	if *compositeWhiteList && whitelist.UnkeyedLiteral[typeName] {
		// skip whitelisted types
		return
	}
	under := typ.Underlying()
	for {
		ptr, ok := under.(*types.Pointer)
		if !ok {
			break
		}
		under = ptr.Elem().Underlying()
	}
	if _, ok := under.(*types.Struct); !ok {
		// skip non-struct composite literals
		return
	}
	if isLocalType(f, typ) {
		// allow unkeyed locally defined composite literal
		return
	}

	// check if the CompositeLit contains an unkeyed field
	allKeyValue := true
	for _, e := range cl.Elts {
		if _, ok := e.(*ast.KeyValueExpr); !ok {
			allKeyValue = false
			break
		}
	}
	if allKeyValue {
		// all the composite literal fields are keyed
		return
	}

	f.Badf(cl.Pos(), "%s composite literal uses unkeyed fields",
		types.TypeString(typ, func(pkg *types.Package) string {
			return pkg.Name()
		}))
}

func isLocalType(f *File, typ types.Type) bool {
	switch x := typ.(type) {
	case *types.Struct:
		// struct literals are local types
		return true
	case *types.Pointer:
		return isLocalType(f, x.Elem())
	case *types.Named:
		// names in package foo are local to foo_test too
		return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(f.pkg.typesPkg.Path(), "_test")
	}
	return false
}
vendor/github.com/golangci/govet/copylock.go (256 lines deleted; generated, vendored)
@@ -1,256 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains the code to check that locks are not passed by value.
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("copylocks",
|
||||
"check that locks are not passed by value",
|
||||
checkCopyLocks,
|
||||
funcDecl, rangeStmt, funcLit, callExpr, assignStmt, genDecl, compositeLit, returnStmt)
|
||||
}
|
||||
|
||||
// checkCopyLocks checks whether node might
|
||||
// inadvertently copy a lock.
|
||||
func checkCopyLocks(f *File, node ast.Node) {
|
||||
switch node := node.(type) {
|
||||
case *ast.RangeStmt:
|
||||
checkCopyLocksRange(f, node)
|
||||
case *ast.FuncDecl:
|
||||
checkCopyLocksFunc(f, node.Name.Name, node.Recv, node.Type)
|
||||
case *ast.FuncLit:
|
||||
checkCopyLocksFunc(f, "func", nil, node.Type)
|
||||
case *ast.CallExpr:
|
||||
checkCopyLocksCallExpr(f, node)
|
||||
case *ast.AssignStmt:
|
||||
checkCopyLocksAssign(f, node)
|
||||
case *ast.GenDecl:
|
||||
checkCopyLocksGenDecl(f, node)
|
||||
case *ast.CompositeLit:
|
||||
checkCopyLocksCompositeLit(f, node)
|
||||
case *ast.ReturnStmt:
|
||||
checkCopyLocksReturnStmt(f, node)
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksAssign checks whether an assignment
|
||||
// copies a lock.
|
||||
func checkCopyLocksAssign(f *File, as *ast.AssignStmt) {
|
||||
for i, x := range as.Rhs {
|
||||
if path := lockPathRhs(f, x); path != nil {
|
||||
f.Badf(x.Pos(), "assignment copies lock value to %v: %v", f.gofmt(as.Lhs[i]), path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksGenDecl checks whether lock is copied
|
||||
// in variable declaration.
|
||||
func checkCopyLocksGenDecl(f *File, gd *ast.GenDecl) {
|
||||
if gd.Tok != token.VAR {
|
||||
return
|
||||
}
|
||||
for _, spec := range gd.Specs {
|
||||
valueSpec := spec.(*ast.ValueSpec)
|
||||
for i, x := range valueSpec.Values {
|
||||
if path := lockPathRhs(f, x); path != nil {
|
||||
f.Badf(x.Pos(), "variable declaration copies lock value to %v: %v", valueSpec.Names[i].Name, path)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksCompositeLit detects lock copy inside a composite literal
|
||||
func checkCopyLocksCompositeLit(f *File, cl *ast.CompositeLit) {
|
||||
for _, x := range cl.Elts {
|
||||
if node, ok := x.(*ast.KeyValueExpr); ok {
|
||||
x = node.Value
|
||||
}
|
||||
if path := lockPathRhs(f, x); path != nil {
|
||||
f.Badf(x.Pos(), "literal copies lock value from %v: %v", f.gofmt(x), path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksReturnStmt detects lock copy in return statement
|
||||
func checkCopyLocksReturnStmt(f *File, rs *ast.ReturnStmt) {
|
||||
for _, x := range rs.Results {
|
||||
if path := lockPathRhs(f, x); path != nil {
|
||||
f.Badf(x.Pos(), "return copies lock value: %v", path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksCallExpr detects lock copy in the arguments to a function call
|
||||
func checkCopyLocksCallExpr(f *File, ce *ast.CallExpr) {
|
||||
var id *ast.Ident
|
||||
switch fun := ce.Fun.(type) {
|
||||
case *ast.Ident:
|
||||
id = fun
|
||||
case *ast.SelectorExpr:
|
||||
id = fun.Sel
|
||||
}
|
||||
if fun, ok := f.pkg.uses[id].(*types.Builtin); ok {
|
||||
switch fun.Name() {
|
||||
case "new", "len", "cap", "Sizeof":
|
||||
return
|
||||
}
|
||||
}
|
||||
for _, x := range ce.Args {
|
||||
if path := lockPathRhs(f, x); path != nil {
|
||||
f.Badf(x.Pos(), "call of %s copies lock value: %v", f.gofmt(ce.Fun), path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksFunc checks whether a function might
|
||||
// inadvertently copy a lock, by checking whether
|
||||
// its receiver, parameters, or return values
|
||||
// are locks.
|
||||
func checkCopyLocksFunc(f *File, name string, recv *ast.FieldList, typ *ast.FuncType) {
|
||||
if recv != nil && len(recv.List) > 0 {
|
||||
expr := recv.List[0].Type
|
||||
if path := lockPath(f.pkg.typesPkg, f.pkg.types[expr].Type); path != nil {
|
||||
f.Badf(expr.Pos(), "%s passes lock by value: %v", name, path)
|
||||
}
|
||||
}
|
||||
|
||||
if typ.Params != nil {
|
||||
for _, field := range typ.Params.List {
|
||||
expr := field.Type
|
||||
if path := lockPath(f.pkg.typesPkg, f.pkg.types[expr].Type); path != nil {
|
||||
f.Badf(expr.Pos(), "%s passes lock by value: %v", name, path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Don't check typ.Results. If T has a Lock field it's OK to write
|
||||
// return T{}
|
||||
// because that is returning the zero value. Leave result checking
|
||||
// to the return statement.
|
||||
}
|
||||
|
||||
// checkCopyLocksRange checks whether a range statement
|
||||
// might inadvertently copy a lock by checking whether
|
||||
// any of the range variables are locks.
|
||||
func checkCopyLocksRange(f *File, r *ast.RangeStmt) {
|
||||
checkCopyLocksRangeVar(f, r.Tok, r.Key)
|
||||
checkCopyLocksRangeVar(f, r.Tok, r.Value)
|
||||
}
|
||||
|
||||
func checkCopyLocksRangeVar(f *File, rtok token.Token, e ast.Expr) {
|
||||
if e == nil {
|
||||
return
|
||||
}
|
||||
id, isId := e.(*ast.Ident)
|
||||
if isId && id.Name == "_" {
|
||||
return
|
||||
}
|
||||
|
||||
var typ types.Type
|
||||
if rtok == token.DEFINE {
|
||||
if !isId {
|
||||
return
|
||||
}
|
||||
obj := f.pkg.defs[id]
|
||||
if obj == nil {
|
||||
return
|
||||
}
|
||||
typ = obj.Type()
|
||||
} else {
|
||||
typ = f.pkg.types[e].Type
|
||||
}
|
||||
|
||||
if typ == nil {
|
||||
return
|
||||
}
|
||||
if path := lockPath(f.pkg.typesPkg, typ); path != nil {
|
||||
f.Badf(e.Pos(), "range var %s copies lock: %v", f.gofmt(e), path)
|
||||
}
|
||||
}
|
||||
|
||||
type typePath []types.Type
|
||||
|
||||
// String pretty-prints a typePath.
|
||||
func (path typePath) String() string {
|
||||
n := len(path)
|
||||
var buf bytes.Buffer
|
||||
for i := range path {
|
||||
if i > 0 {
|
||||
fmt.Fprint(&buf, " contains ")
|
||||
}
|
||||
// The human-readable path is in reverse order, outermost to innermost.
|
||||
fmt.Fprint(&buf, path[n-i-1].String())
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func lockPathRhs(f *File, x ast.Expr) typePath {
|
||||
if _, ok := x.(*ast.CompositeLit); ok {
|
||||
return nil
|
||||
}
|
||||
if _, ok := x.(*ast.CallExpr); ok {
|
||||
// A call may return a zero value.
|
||||
return nil
|
||||
}
|
||||
if star, ok := x.(*ast.StarExpr); ok {
|
||||
if _, ok := star.X.(*ast.CallExpr); ok {
|
||||
// A call may return a pointer to a zero value.
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return lockPath(f.pkg.typesPkg, f.pkg.types[x].Type)
|
||||
}
|
||||
|
||||
// lockPath returns a typePath describing the location of a lock value
|
||||
// contained in typ. If there is no contained lock, it returns nil.
|
||||
func lockPath(tpkg *types.Package, typ types.Type) typePath {
|
||||
if typ == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
for {
|
||||
atyp, ok := typ.Underlying().(*types.Array)
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
typ = atyp.Elem()
|
||||
}
|
||||
|
||||
// We're only interested in the case in which the underlying
|
||||
// type is a struct. (Interfaces and pointers are safe to copy.)
|
||||
styp, ok := typ.Underlying().(*types.Struct)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// We're looking for cases in which a reference to this type
|
||||
// can be locked, but a value cannot. This differentiates
|
||||
// embedded interfaces from embedded values.
|
||||
if plock := types.NewMethodSet(types.NewPointer(typ)).Lookup(tpkg, "Lock"); plock != nil {
|
||||
if lock := types.NewMethodSet(typ).Lookup(tpkg, "Lock"); lock == nil {
|
||||
return []types.Type{typ}
|
||||
}
|
||||
}
|
||||
|
||||
nfields := styp.NumFields()
|
||||
for i := 0; i < nfields; i++ {
|
||||
ftyp := styp.Field(i).Type()
|
||||
subpath := lockPath(tpkg, ftyp)
|
||||
if subpath != nil {
|
||||
return append(subpath, typ)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
vendor/github.com/golangci/govet/doc.go (224 lines deleted; generated, vendored)
@@ -1,224 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
|
||||
Vet examines Go source code and reports suspicious constructs, such as Printf
|
||||
calls whose arguments do not align with the format string. Vet uses heuristics
|
||||
that do not guarantee all reports are genuine problems, but it can find errors
|
||||
not caught by the compilers.
|
||||
|
||||
Vet is normally invoked using the go command by running "go vet":
|
||||
|
||||
go vet
|
||||
vets the package in the current directory.
|
||||
|
||||
go vet package/path/name
|
||||
vets the package whose path is provided.
|
||||
|
||||
Use "go help packages" to see other ways of specifying which packages to vet.
|
||||
|
||||
Vet's exit code is 2 for erroneous invocation of the tool, 1 if a
|
||||
problem was reported, and 0 otherwise. Note that the tool does not
|
||||
check every possible problem and depends on unreliable heuristics
|
||||
so it should be used as guidance only, not as a firm indicator of
|
||||
program correctness.
|
||||
|
||||
By default the -all flag is set so all checks are performed.
|
||||
If any flags are explicitly set to true, only those tests are run. Conversely, if
|
||||
any flag is explicitly set to false, only those tests are disabled. Thus -printf=true
|
||||
runs the printf check, -printf=false runs all checks except the printf check.
|
||||
|
||||
By default vet uses the object files generated by 'go install some/pkg' to typecheck the code.
|
||||
If the -source flag is provided, vet uses only source code.
|
||||
|
||||
Available checks:
|
||||
|
||||
Assembly declarations
|
||||
|
||||
Flag: -asmdecl
|
||||
|
||||
Mismatches between assembly files and Go function declarations.
|
||||
|
||||
Useless assignments
|
||||
|
||||
Flag: -assign
|
||||
|
||||
Check for useless assignments.
|
||||
|
||||
Atomic mistakes
|
||||
|
||||
Flag: -atomic
|
||||
|
||||
Common mistaken usages of the sync/atomic package.
|
||||
|
||||
Boolean conditions
|
||||
|
||||
Flag: -bool
|
||||
|
||||
Mistakes involving boolean operators.
|
||||
|
||||
Build tags
|
||||
|
||||
Flag: -buildtags
|
||||
|
||||
Badly formed or misplaced +build tags.
|
||||
|
||||
Invalid uses of cgo
|
||||
|
||||
Flag: -cgocall
|
||||
|
||||
Detect some violations of the cgo pointer passing rules.
|
||||
|
||||
Unkeyed composite literals
|
||||
|
||||
Flag: -composites
|
||||
|
||||
Composite struct literals that do not use the field-keyed syntax.
|
||||
|
||||
Copying locks
|
||||
|
||||
Flag: -copylocks
|
||||
|
||||
Locks that are erroneously passed by value.
|
||||
|
||||
HTTP responses used incorrectly
|
||||
|
||||
Flag: -httpresponse
|
||||
|
||||
Mistakes deferring a function call on an HTTP response before
|
||||
checking whether the error returned with the response was nil.
|
||||
|
||||
Failure to call the cancelation function returned by WithCancel
|
||||
|
||||
Flag: -lostcancel
|
||||
|
||||
The cancelation function returned by context.WithCancel, WithTimeout,
|
||||
and WithDeadline must be called or the new context will remain live
|
||||
until its parent context is cancelled.
|
||||
(The background context is never cancelled.)
|
||||
|
||||
Methods
|
||||
|
||||
Flag: -methods
|
||||
|
||||
Non-standard signatures for methods with familiar names, including:
|
||||
Format GobEncode GobDecode MarshalJSON MarshalXML
|
||||
Peek ReadByte ReadFrom ReadRune Scan Seek
|
||||
UnmarshalJSON UnreadByte UnreadRune WriteByte
|
||||
WriteTo
|
||||
|
||||
Nil function comparison
|
||||
|
||||
Flag: -nilfunc
|
||||
|
||||
Comparisons between functions and nil.
|
||||
|
||||
Printf family
|
||||
|
||||
Flag: -printf
|
||||
|
||||
Suspicious calls to functions in the Printf family, including any functions
|
||||
with these names, disregarding case:
|
||||
Print Printf Println
|
||||
Fprint Fprintf Fprintln
|
||||
Sprint Sprintf Sprintln
|
||||
Error Errorf
|
||||
Fatal Fatalf
|
||||
Log Logf
|
||||
Panic Panicf Panicln
|
||||
The -printfuncs flag can be used to redefine this list.
|
||||
If the function name ends with an 'f', the function is assumed to take
|
||||
a format descriptor string in the manner of fmt.Printf. If not, vet
|
||||
complains about arguments that look like format descriptor strings.
|
||||
|
||||
It also checks for errors such as using a Writer as the first argument of
|
||||
Printf.
|
||||
|
||||
Range loop variables
|
||||
|
||||
Flag: -rangeloops
|
||||
|
||||
Incorrect uses of range loop variables in closures.
|
||||
|
||||
Shadowed variables
|
||||
|
||||
Flag: -shadow=false (experimental; must be set explicitly)
|
||||
|
||||
Variables that may have been unintentionally shadowed.
|
||||
|
||||
Shifts
|
||||
|
||||
Flag: -shift
|
||||
|
||||
Shifts equal to or longer than the variable's length.
|
||||
|
||||
Struct tags
|
||||
|
||||
Flag: -structtags
|
||||
|
||||
Struct tags that do not follow the format understood by reflect.StructTag.Get.
|
||||
Well-known encoding struct tags (json, xml) used with unexported fields.
|
||||
|
||||
Tests and documentation examples
|
||||
|
||||
Flag: -tests
|
||||
|
||||
Mistakes involving tests including functions with incorrect names or signatures
|
||||
and example tests that document identifiers not in the package.
|
||||
|
||||
Unreachable code
|
||||
|
||||
Flag: -unreachable
|
||||
|
||||
Unreachable code.
|
||||
|
||||
Misuse of unsafe Pointers
|
||||
|
||||
Flag: -unsafeptr
|
||||
|
||||
Likely incorrect uses of unsafe.Pointer to convert integers to pointers.
|
||||
A conversion from uintptr to unsafe.Pointer is invalid if it implies that
|
||||
there is a uintptr-typed word in memory that holds a pointer value,
|
||||
because that word will be invisible to stack copying and to the garbage
|
||||
collector.
|
||||
|
||||
Unused result of certain function calls
|
||||
|
||||
Flag: -unusedresult
|
||||
|
||||
Calls to well-known functions and methods that return a value that is
|
||||
discarded. By default, this includes functions like fmt.Errorf and
|
||||
fmt.Sprintf and methods like String and Error. The flags -unusedfuncs
|
||||
and -unusedstringmethods control the set.
|
||||
|
||||
Other flags
|
||||
|
||||
These flags configure the behavior of vet:
|
||||
|
||||
-all (default true)
|
||||
Enable all non-experimental checks.
|
||||
-v
|
||||
Verbose mode
|
||||
-printfuncs
|
||||
A comma-separated list of print-like function names
|
||||
to supplement the standard list.
|
||||
For more information, see the discussion of the -printf flag.
|
||||
-shadowstrict
|
||||
Whether to be strict about shadowing; can be noisy.
|
||||
|
||||
Using vet directly
|
||||
|
||||
For testing and debugging vet can be run directly by invoking
|
||||
"go tool vet" or just running the binary. Run this way, vet might not
|
||||
have up to date information for imported packages.
|
||||
|
||||
go tool vet source/directory/*.go
|
||||
vets the files named, all of which must be in the same package.
|
||||
|
||||
go tool vet source/directory
|
||||
recursively descends the directory, vetting each package it finds.
|
||||
|
||||
*/
|
||||
package govet
|
vendor/github.com/golangci/govet/golangci.go (51 lines deleted; generated, vendored)
@@ -1,51 +0,0 @@
package govet

import (
	"go/ast"
	"go/token"
	"strings"

	"golang.org/x/tools/go/loader"
)

type Issue struct {
	Pos     token.Position
	Message string
}

var foundIssues []Issue

func Analyze(files []*ast.File, fset *token.FileSet, pkgInfo *loader.PackageInfo, checkShadowing bool, pg astFilePathGetter) ([]Issue, error) {
	foundIssues = nil
	*source = false // import type data for "fmt" from installed packages

	if checkShadowing {
		experimental["shadow"] = false
	}
	for name, setting := range report {
		if *setting == unset && !experimental[name] {
			*setting = setTrue
		}
	}

	initPrintFlags()
	initUnusedFlags()

	filesRun = true
	for _, f := range files {
		name := fset.Position(f.Pos()).Filename
		if !strings.HasSuffix(name, "_test.go") {
			includesNonTest = true
		}
	}
	pkg, err := doPackage(nil, pkgInfo, fset, files, pg)
	if err != nil {
		return nil, err
	}

	if pkg == nil {
		return nil, nil
	}

	return foundIssues, nil
}
vendor/github.com/golangci/govet/httpresponse.go (137 lines deleted; generated, vendored)
@@ -1,137 +0,0 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file contains the check for http.Response values being used before
// checking for errors.

package govet

import (
	"go/ast"
	"go/types"
)

func init() {
	register("httpresponse",
		"check errors are checked before using an http Response",
		checkHTTPResponse, callExpr)
}

func checkHTTPResponse(f *File, node ast.Node) {
	call := node.(*ast.CallExpr)
	if !isHTTPFuncOrMethodOnClient(f, call) {
		return // the function call is not related to this check.
	}

	finder := &blockStmtFinder{node: call}
	ast.Walk(finder, f.file)
	stmts := finder.stmts()
	if len(stmts) < 2 {
		return // the call to the http function is the last statement of the block.
	}

	asg, ok := stmts[0].(*ast.AssignStmt)
	if !ok {
		return // the first statement is not assignment.
	}
	resp := rootIdent(asg.Lhs[0])
	if resp == nil {
		return // could not find the http.Response in the assignment.
	}

	def, ok := stmts[1].(*ast.DeferStmt)
	if !ok {
		return // the following statement is not a defer.
	}
	root := rootIdent(def.Call.Fun)
	if root == nil {
		return // could not find the receiver of the defer call.
	}

	if resp.Obj == root.Obj {
		f.Badf(root.Pos(), "using %s before checking for errors", resp.Name)
	}
}

// isHTTPFuncOrMethodOnClient checks whether the given call expression is on
// either a function of the net/http package or a method of http.Client that
// returns (*http.Response, error).
func isHTTPFuncOrMethodOnClient(f *File, expr *ast.CallExpr) bool {
	fun, _ := expr.Fun.(*ast.SelectorExpr)
	sig, _ := f.pkg.types[fun].Type.(*types.Signature)
	if sig == nil {
		return false // the call is not on of the form x.f()
	}

	res := sig.Results()
	if res.Len() != 2 {
		return false // the function called does not return two values.
	}
	if ptr, ok := res.At(0).Type().(*types.Pointer); !ok || !isNamedType(ptr.Elem(), "net/http", "Response") {
		return false // the first return type is not *http.Response.
	}
	if !types.Identical(res.At(1).Type().Underlying(), errorType) {
		return false // the second return type is not error
	}

	typ := f.pkg.types[fun.X].Type
	if typ == nil {
		id, ok := fun.X.(*ast.Ident)
		return ok && id.Name == "http" // function in net/http package.
	}

	if isNamedType(typ, "net/http", "Client") {
		return true // method on http.Client.
	}
	ptr, ok := typ.(*types.Pointer)
	return ok && isNamedType(ptr.Elem(), "net/http", "Client") // method on *http.Client.
}

// blockStmtFinder is an ast.Visitor that given any ast node can find the
// statement containing it and its succeeding statements in the same block.
type blockStmtFinder struct {
	node  ast.Node       // target of search
	stmt  ast.Stmt       // innermost statement enclosing argument to Visit
	block *ast.BlockStmt // innermost block enclosing argument to Visit.
}

// Visit finds f.node performing a search down the ast tree.
// It keeps the last block statement and statement seen for later use.
func (f *blockStmtFinder) Visit(node ast.Node) ast.Visitor {
	if node == nil || f.node.Pos() < node.Pos() || f.node.End() > node.End() {
		return nil // not here
	}
	switch n := node.(type) {
	case *ast.BlockStmt:
		f.block = n
	case ast.Stmt:
		f.stmt = n
	}
	if f.node.Pos() == node.Pos() && f.node.End() == node.End() {
		return nil // found
	}
	return f // keep looking
}

// stmts returns the statements of f.block starting from the one including f.node.
func (f *blockStmtFinder) stmts() []ast.Stmt {
	for i, v := range f.block.List {
		if f.stmt == v {
			return f.block.List[i:]
		}
	}
	return nil
}

// rootIdent finds the root identifier x in a chain of selections x.y.z, or nil if not found.
func rootIdent(n ast.Node) *ast.Ident {
	switch n := n.(type) {
	case *ast.SelectorExpr:
		return rootIdent(n.X)
	case *ast.Ident:
		return n
	default:
		return nil
	}
}
vendor/github.com/golangci/govet/lostcancel.go (323 lines deleted; generated, vendored)
@@ -1,323 +0,0 @@
// Copyright 2016 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/types"
|
||||
"strconv"
|
||||
|
||||
"github.com/golangci/govet/lib/cfg"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("lostcancel",
|
||||
"check for failure to call cancelation function returned by context.WithCancel",
|
||||
checkLostCancel,
|
||||
funcDecl, funcLit)
|
||||
}
|
||||
|
||||
const debugLostCancel = false
|
||||
|
||||
var contextPackage = "context"
|
||||
|
||||
// checkLostCancel reports a failure to the call the cancel function
|
||||
// returned by context.WithCancel, either because the variable was
|
||||
// assigned to the blank identifier, or because there exists a
|
||||
// control-flow path from the call to a return statement and that path
|
||||
// does not "use" the cancel function. Any reference to the variable
|
||||
// counts as a use, even within a nested function literal.
|
||||
//
|
||||
// checkLostCancel analyzes a single named or literal function.
|
||||
func checkLostCancel(f *File, node ast.Node) {
|
||||
// Fast path: bypass check if file doesn't use context.WithCancel.
|
||||
if !hasImport(f.file, contextPackage) {
|
||||
return
|
||||
}
|
||||
|
||||
// Maps each cancel variable to its defining ValueSpec/AssignStmt.
|
||||
cancelvars := make(map[*types.Var]ast.Node)
|
||||
|
||||
// Find the set of cancel vars to analyze.
|
||||
stack := make([]ast.Node, 0, 32)
|
||||
ast.Inspect(node, func(n ast.Node) bool {
|
||||
switch n.(type) {
|
||||
case *ast.FuncLit:
|
||||
if len(stack) > 0 {
|
||||
return false // don't stray into nested functions
|
||||
}
|
||||
case nil:
|
||||
stack = stack[:len(stack)-1] // pop
|
||||
return true
|
||||
}
|
||||
stack = append(stack, n) // push
|
||||
|
||||
// Look for [{AssignStmt,ValueSpec} CallExpr SelectorExpr]:
|
||||
//
|
||||
// ctx, cancel := context.WithCancel(...)
|
||||
// ctx, cancel = context.WithCancel(...)
|
||||
// var ctx, cancel = context.WithCancel(...)
|
||||
//
|
||||
if isContextWithCancel(f, n) && isCall(stack[len(stack)-2]) {
|
||||
var id *ast.Ident // id of cancel var
|
||||
stmt := stack[len(stack)-3]
|
||||
switch stmt := stmt.(type) {
|
||||
case *ast.ValueSpec:
|
||||
if len(stmt.Names) > 1 {
|
||||
id = stmt.Names[1]
|
||||
}
|
||||
case *ast.AssignStmt:
|
||||
if len(stmt.Lhs) > 1 {
|
||||
id, _ = stmt.Lhs[1].(*ast.Ident)
|
||||
}
|
||||
}
|
||||
if id != nil {
|
||||
if id.Name == "_" {
|
||||
f.Badf(id.Pos(), "the cancel function returned by context.%s should be called, not discarded, to avoid a context leak",
|
||||
n.(*ast.SelectorExpr).Sel.Name)
|
||||
} else if v, ok := f.pkg.uses[id].(*types.Var); ok {
|
||||
cancelvars[v] = stmt
|
||||
} else if v, ok := f.pkg.defs[id].(*types.Var); ok {
|
||||
cancelvars[v] = stmt
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
if len(cancelvars) == 0 {
|
||||
return // no need to build CFG
|
||||
}
|
||||
|
||||
// Tell the CFG builder which functions never return.
|
||||
info := &types.Info{Uses: f.pkg.uses, Selections: f.pkg.selectors}
|
||||
mayReturn := func(call *ast.CallExpr) bool {
|
||||
name := callName(info, call)
|
||||
return !noReturnFuncs[name]
|
||||
}
|
||||
|
||||
// Build the CFG.
|
||||
var g *cfg.CFG
|
||||
var sig *types.Signature
|
||||
switch node := node.(type) {
|
||||
case *ast.FuncDecl:
|
||||
obj := f.pkg.defs[node.Name]
|
||||
if obj == nil {
|
||||
return // type error (e.g. duplicate function declaration)
|
||||
}
|
||||
sig, _ = obj.Type().(*types.Signature)
|
||||
g = cfg.New(node.Body, mayReturn)
|
||||
case *ast.FuncLit:
|
||||
sig, _ = f.pkg.types[node.Type].Type.(*types.Signature)
|
||||
g = cfg.New(node.Body, mayReturn)
|
||||
}
|
||||
|
||||
// Print CFG.
|
||||
if debugLostCancel {
|
||||
fmt.Println(g.Format(f.fset))
|
||||
}
|
||||
|
||||
// Examine the CFG for each variable in turn.
|
||||
// (It would be more efficient to analyze all cancelvars in a
|
||||
// single pass over the AST, but seldom is there more than one.)
|
||||
for v, stmt := range cancelvars {
|
||||
if ret := lostCancelPath(f, g, v, stmt, sig); ret != nil {
|
||||
lineno := f.fset.Position(stmt.Pos()).Line
|
||||
f.Badf(stmt.Pos(), "the %s function is not used on all paths (possible context leak)", v.Name())
|
||||
f.Badf(ret.Pos(), "this return statement may be reached without using the %s var defined on line %d", v.Name(), lineno)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func isCall(n ast.Node) bool { _, ok := n.(*ast.CallExpr); return ok }
|
||||
|
||||
func hasImport(f *ast.File, path string) bool {
|
||||
for _, imp := range f.Imports {
|
||||
v, _ := strconv.Unquote(imp.Path.Value)
|
||||
if v == path {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// isContextWithCancel reports whether n is one of the qualified identifiers
|
||||
// context.With{Cancel,Timeout,Deadline}.
|
||||
func isContextWithCancel(f *File, n ast.Node) bool {
|
||||
if sel, ok := n.(*ast.SelectorExpr); ok {
|
||||
switch sel.Sel.Name {
|
||||
case "WithCancel", "WithTimeout", "WithDeadline":
|
||||
if x, ok := sel.X.(*ast.Ident); ok {
|
||||
if pkgname, ok := f.pkg.uses[x].(*types.PkgName); ok {
|
||||
return pkgname.Imported().Path() == contextPackage
|
||||
}
|
||||
// Import failed, so we can't check package path.
|
||||
// Just check the local package name (heuristic).
|
||||
return x.Name == "context"
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// lostCancelPath finds a path through the CFG, from stmt (which defines
|
||||
// the 'cancel' variable v) to a return statement, that doesn't "use" v.
|
||||
// If it finds one, it returns the return statement (which may be synthetic).
|
||||
// sig is the function's type, if known.
|
||||
func lostCancelPath(f *File, g *cfg.CFG, v *types.Var, stmt ast.Node, sig *types.Signature) *ast.ReturnStmt {
|
||||
vIsNamedResult := sig != nil && tupleContains(sig.Results(), v)
|
||||
|
||||
// uses reports whether stmts contain a "use" of variable v.
|
||||
uses := func(f *File, v *types.Var, stmts []ast.Node) bool {
|
||||
found := false
|
||||
for _, stmt := range stmts {
|
||||
ast.Inspect(stmt, func(n ast.Node) bool {
|
||||
switch n := n.(type) {
|
||||
case *ast.Ident:
|
||||
if f.pkg.uses[n] == v {
|
||||
found = true
|
||||
}
|
||||
case *ast.ReturnStmt:
|
||||
// A naked return statement counts as a use
|
||||
// of the named result variables.
|
||||
if n.Results == nil && vIsNamedResult {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
return !found
|
||||
})
|
||||
}
|
||||
return found
|
||||
}
|
||||
|
||||
// blockUses computes "uses" for each block, caching the result.
|
||||
memo := make(map[*cfg.Block]bool)
|
||||
blockUses := func(f *File, v *types.Var, b *cfg.Block) bool {
|
||||
res, ok := memo[b]
|
||||
if !ok {
|
||||
res = uses(f, v, b.Nodes)
|
||||
memo[b] = res
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// Find the var's defining block in the CFG,
|
||||
// plus the rest of the statements of that block.
|
||||
var defblock *cfg.Block
|
||||
var rest []ast.Node
|
||||
outer:
|
||||
for _, b := range g.Blocks {
|
||||
for i, n := range b.Nodes {
|
||||
if n == stmt {
|
||||
defblock = b
|
||||
rest = b.Nodes[i+1:]
|
||||
break outer
|
||||
}
|
||||
}
|
||||
}
|
||||
if defblock == nil {
|
||||
panic("internal error: can't find defining block for cancel var")
|
||||
}
|
||||
|
||||
// Is v "used" in the remainder of its defining block?
|
||||
if uses(f, v, rest) {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Does the defining block return without using v?
|
||||
if ret := defblock.Return(); ret != nil {
|
||||
return ret
|
||||
}
|
||||
|
||||
// Search the CFG depth-first for a path, from defblock to a
|
||||
// return block, in which v is never "used".
|
||||
seen := make(map[*cfg.Block]bool)
|
||||
var search func(blocks []*cfg.Block) *ast.ReturnStmt
|
||||
search = func(blocks []*cfg.Block) *ast.ReturnStmt {
|
||||
for _, b := range blocks {
|
||||
if !seen[b] {
|
||||
seen[b] = true
|
||||
|
||||
// Prune the search if the block uses v.
|
||||
if blockUses(f, v, b) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Found path to return statement?
|
||||
if ret := b.Return(); ret != nil {
|
||||
if debugLostCancel {
|
||||
fmt.Printf("found path to return in block %s\n", b)
|
||||
}
|
||||
return ret // found
|
||||
}
|
||||
|
||||
// Recur
|
||||
if ret := search(b.Succs); ret != nil {
|
||||
if debugLostCancel {
|
||||
fmt.Printf(" from block %s\n", b)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return search(defblock.Succs)
|
||||
}
|
||||
|
||||
func tupleContains(tuple *types.Tuple, v *types.Var) bool {
|
||||
for i := 0; i < tuple.Len(); i++ {
|
||||
if tuple.At(i) == v {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
var noReturnFuncs = map[string]bool{
|
||||
"(*testing.common).FailNow": true,
|
||||
"(*testing.common).Fatal": true,
|
||||
"(*testing.common).Fatalf": true,
|
||||
"(*testing.common).Skip": true,
|
||||
"(*testing.common).SkipNow": true,
|
||||
"(*testing.common).Skipf": true,
|
||||
"log.Fatal": true,
|
||||
"log.Fatalf": true,
|
||||
"log.Fatalln": true,
|
||||
"os.Exit": true,
|
||||
"panic": true,
|
||||
"runtime.Goexit": true,
|
||||
}
|
||||
|
||||
// callName returns the canonical name of the builtin, method, or
|
||||
// function called by call, if known.
|
||||
func callName(info *types.Info, call *ast.CallExpr) string {
|
||||
switch fun := call.Fun.(type) {
|
||||
case *ast.Ident:
|
||||
// builtin, e.g. "panic"
|
||||
if obj, ok := info.Uses[fun].(*types.Builtin); ok {
|
||||
return obj.Name()
|
||||
}
|
||||
case *ast.SelectorExpr:
|
||||
if sel, ok := info.Selections[fun]; ok && sel.Kind() == types.MethodVal {
|
||||
// method call, e.g. "(*testing.common).Fatal"
|
||||
meth := sel.Obj()
|
||||
return fmt.Sprintf("(%s).%s",
|
||||
meth.Type().(*types.Signature).Recv().Type(),
|
||||
meth.Name())
|
||||
}
|
||||
if obj, ok := info.Uses[fun.Sel]; ok {
|
||||
// qualified identifier, e.g. "os.Exit"
|
||||
return fmt.Sprintf("%s.%s",
|
||||
obj.Pkg().Path(),
|
||||
obj.Name())
|
||||
}
|
||||
}
|
||||
|
||||
// function with no name, or defined in missing imported package
|
||||
return ""
|
||||
}
|
vendor/github.com/golangci/govet/main.go (614 lines deleted; generated, vendored)
@@ -1,614 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Vet is a simple checker for static errors in Go source code.
|
||||
// See doc.go for more information.
|
||||
package govet
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/build"
|
||||
"go/importer"
|
||||
"go/printer"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/loader"
|
||||
)
|
||||
|
||||
// Important! If you add flags here, make sure to update cmd/go/internal/vet/vetflag.go.
|
||||
|
||||
var (
|
||||
verbose = flag.Bool("v", true, "verbose")
|
||||
source = flag.Bool("source", false, "import from source instead of compiled object files")
|
||||
tags = flag.String("tags", "", "space-separated list of build tags to apply when parsing")
|
||||
tagList = []string{} // exploded version of tags flag; set in main
|
||||
|
||||
vcfg vetConfig
|
||||
mustTypecheck bool
|
||||
)
|
||||
|
||||
var exitCode = 0
|
||||
|
||||
// "-all" flag enables all non-experimental checks
|
||||
var all = triStateFlag("all", unset, "enable all non-experimental checks")
|
||||
|
||||
// Flags to control which individual checks to perform.
|
||||
var report = map[string]*triState{
|
||||
// Only unusual checks are written here.
|
||||
// Most checks that operate during the AST walk are added by register.
|
||||
"asmdecl": triStateFlag("asmdecl", unset, "check assembly against Go declarations"),
|
||||
"buildtags": triStateFlag("buildtags", unset, "check that +build tags are valid"),
|
||||
}
|
||||
|
||||
// experimental records the flags enabling experimental features. These must be
|
||||
// requested explicitly; they are not enabled by -all.
|
||||
var experimental = map[string]bool{}
|
||||
|
||||
// setTrueCount record how many flags are explicitly set to true.
|
||||
var setTrueCount int
|
||||
|
||||
// dirsRun and filesRun indicate whether the vet is applied to directory or
|
||||
// file targets. The distinction affects which checks are run.
|
||||
var dirsRun, filesRun bool
|
||||
|
||||
// includesNonTest indicates whether the vet is applied to non-test targets.
|
||||
// Certain checks are relevant only if they touch both test and non-test files.
|
||||
var includesNonTest bool
|
||||
|
||||
// A triState is a boolean that knows whether it has been set to either true or false.
|
||||
// It is used to identify if a flag appears; the standard boolean flag cannot
|
||||
// distinguish missing from unset. It also satisfies flag.Value.
|
||||
type triState int
|
||||
|
||||
const (
|
||||
unset triState = iota
|
||||
setTrue
|
||||
setFalse
|
||||
)
|
||||
|
||||
func triStateFlag(name string, value triState, usage string) *triState {
|
||||
flag.Var(&value, name, usage)
|
||||
return &value
|
||||
}
|
||||
|
||||
// triState implements flag.Value, flag.Getter, and flag.boolFlag.
|
||||
// They work like boolean flags: we can say vet -printf as well as vet -printf=true
|
||||
func (ts *triState) Get() interface{} {
|
||||
return *ts == setTrue
|
||||
}
|
||||
|
||||
func (ts triState) isTrue() bool {
|
||||
return ts == setTrue
|
||||
}
|
||||
|
||||
func (ts *triState) Set(value string) error {
|
||||
b, err := strconv.ParseBool(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if b {
|
||||
*ts = setTrue
|
||||
setTrueCount++
|
||||
} else {
|
||||
*ts = setFalse
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ts *triState) String() string {
|
||||
switch *ts {
|
||||
case unset:
|
||||
return "true" // An unset flag will be set by -all, so defaults to true.
|
||||
case setTrue:
|
||||
return "true"
|
||||
case setFalse:
|
||||
return "false"
|
||||
}
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
func (ts triState) IsBoolFlag() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// vet tells whether to report errors for the named check, a flag name.
|
||||
func vet(name string) bool {
|
||||
return report[name].isTrue()
|
||||
}
|
||||
|
||||
// setExit sets the value for os.Exit when it is called, later. It
|
||||
// remembers the highest value.
|
||||
func setExit(err int) {
|
||||
if err > exitCode {
|
||||
exitCode = err
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
// Each of these vars has a corresponding case in (*File).Visit.
|
||||
assignStmt *ast.AssignStmt
|
||||
binaryExpr *ast.BinaryExpr
|
||||
callExpr *ast.CallExpr
|
||||
compositeLit *ast.CompositeLit
|
||||
exprStmt *ast.ExprStmt
|
||||
forStmt *ast.ForStmt
|
||||
funcDecl *ast.FuncDecl
|
||||
funcLit *ast.FuncLit
|
||||
genDecl *ast.GenDecl
|
||||
interfaceType *ast.InterfaceType
|
||||
rangeStmt *ast.RangeStmt
|
||||
returnStmt *ast.ReturnStmt
|
||||
structType *ast.StructType
|
||||
|
||||
// checkers is a two-level map.
|
||||
// The outer level is keyed by a nil pointer, one of the AST vars above.
|
||||
// The inner level is keyed by checker name.
|
||||
checkers = make(map[ast.Node]map[string]func(*File, ast.Node))
|
||||
)
|
||||
|
||||
func register(name, usage string, fn func(*File, ast.Node), types ...ast.Node) {
|
||||
report[name] = triStateFlag(name, unset, usage)
|
||||
for _, typ := range types {
|
||||
m := checkers[typ]
|
||||
if m == nil {
|
||||
m = make(map[string]func(*File, ast.Node))
|
||||
checkers[typ] = m
|
||||
}
|
||||
m[name] = fn
|
||||
}
|
||||
}
|
||||
|
||||
// Usage is a replacement usage function for the flags package.
|
||||
func Usage() {
|
||||
fmt.Fprintf(os.Stderr, "Usage of vet:\n")
|
||||
fmt.Fprintf(os.Stderr, "\tvet [flags] directory...\n")
|
||||
fmt.Fprintf(os.Stderr, "\tvet [flags] files... # Must be a single package\n")
|
||||
fmt.Fprintf(os.Stderr, "By default, -all is set and all non-experimental checks are run.\n")
|
||||
fmt.Fprintf(os.Stderr, "For more information run\n")
|
||||
fmt.Fprintf(os.Stderr, "\tgo doc cmd/vet\n\n")
|
||||
fmt.Fprintf(os.Stderr, "Flags:\n")
|
||||
flag.PrintDefaults()
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
// File is a wrapper for the state of a file used in the parser.
|
||||
// The parse tree walkers are all methods of this type.
|
||||
type File struct {
|
||||
pkg *Package
|
||||
fset *token.FileSet
|
||||
name string
|
||||
content []byte
|
||||
file *ast.File
|
||||
b bytes.Buffer // for use by methods
|
||||
|
||||
// Parsed package "foo" when checking package "foo_test"
|
||||
basePkg *Package
|
||||
|
||||
// The keys are the objects that are receivers of a "String()
|
||||
// string" method. The value reports whether the method has a
|
||||
// pointer receiver.
|
||||
// This is used by the recursiveStringer method in print.go.
|
||||
stringerPtrs map[*ast.Object]bool
|
||||
|
||||
// Registered checkers to run.
|
||||
checkers map[ast.Node][]func(*File, ast.Node)
|
||||
|
||||
// Unreachable nodes; can be ignored in shift check.
|
||||
dead map[ast.Node]bool
|
||||
}
|
||||
|
||||
func main() {
|
||||
flag.Usage = Usage
|
||||
flag.Parse()
|
||||
|
||||
// If any flag is set, we run only those checks requested.
|
||||
// If all flag is set true or if no flags are set true, set all the non-experimental ones
|
||||
// not explicitly set (in effect, set the "-all" flag).
|
||||
if setTrueCount == 0 || *all == setTrue {
|
||||
for name, setting := range report {
|
||||
if *setting == unset && !experimental[name] {
|
||||
*setting = setTrue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Accept space-separated tags because that matches
|
||||
// the go command's other subcommands.
|
||||
// Accept commas because go tool vet traditionally has.
|
||||
tagList = strings.Fields(strings.Replace(*tags, ",", " ", -1))
|
||||
|
||||
initPrintFlags()
|
||||
initUnusedFlags()
|
||||
|
||||
if flag.NArg() == 0 {
|
||||
Usage()
|
||||
}
|
||||
|
||||
// Special case for "go vet" passing an explicit configuration:
|
||||
// single argument ending in vet.cfg.
|
||||
// Once we have a more general mechanism for obtaining this
|
||||
// information from build tools like the go command,
|
||||
// vet should be changed to use it. This vet.cfg hack is an
|
||||
// experiment to learn about what form that information should take.
|
||||
if flag.NArg() == 1 && strings.HasSuffix(flag.Arg(0), "vet.cfg") {
|
||||
doPackageCfg(flag.Arg(0))
|
||||
os.Exit(exitCode)
|
||||
}
|
||||
|
||||
for _, name := range flag.Args() {
|
||||
// Is it a directory?
|
||||
fi, err := os.Stat(name)
|
||||
if err != nil {
|
||||
warnf("error walking tree: %s", err)
|
||||
continue
|
||||
}
|
||||
if fi.IsDir() {
|
||||
dirsRun = true
|
||||
} else {
|
||||
filesRun = true
|
||||
if !strings.HasSuffix(name, "_test.go") {
|
||||
includesNonTest = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if dirsRun && filesRun {
|
||||
Usage()
|
||||
}
|
||||
if dirsRun {
|
||||
for _, name := range flag.Args() {
|
||||
walkDir(name)
|
||||
}
|
||||
os.Exit(exitCode)
|
||||
}
|
||||
if pkg, _ := doPackage(nil, nil, nil, nil, nil); pkg == nil {
|
||||
warnf("no files checked")
|
||||
}
|
||||
os.Exit(exitCode)
|
||||
}
|
||||
|
||||
// prefixDirectory places the directory name on the beginning of each name in the list.
|
||||
func prefixDirectory(directory string, names []string) {
|
||||
if directory != "." {
|
||||
for i, name := range names {
|
||||
names[i] = filepath.Join(directory, name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// vetConfig is the JSON config struct prepared by the Go command.
|
||||
type vetConfig struct {
|
||||
Compiler string
|
||||
Dir string
|
||||
ImportPath string
|
||||
GoFiles []string
|
||||
ImportMap map[string]string
|
||||
PackageFile map[string]string
|
||||
|
||||
SucceedOnTypecheckFailure bool
|
||||
|
||||
imp types.Importer
|
||||
}
|
||||
|
||||
func (v *vetConfig) Import(path string) (*types.Package, error) {
|
||||
if v.imp == nil {
|
||||
v.imp = importer.For(v.Compiler, v.openPackageFile)
|
||||
}
|
||||
if path == "unsafe" {
|
||||
return v.imp.Import("unsafe")
|
||||
}
|
||||
p := v.ImportMap[path]
|
||||
if p == "" {
|
||||
return nil, fmt.Errorf("unknown import path %q", path)
|
||||
}
|
||||
if v.PackageFile[p] == "" {
|
||||
return nil, fmt.Errorf("unknown package file for import %q", path)
|
||||
}
|
||||
return v.imp.Import(p)
|
||||
}
|
||||
|
||||
func (v *vetConfig) openPackageFile(path string) (io.ReadCloser, error) {
|
||||
file := v.PackageFile[path]
|
||||
if file == "" {
|
||||
// Note that path here has been translated via v.ImportMap,
|
||||
// unlike in the error in Import above. We prefer the error in
|
||||
// Import, but it's worth diagnosing this one too, just in case.
|
||||
return nil, fmt.Errorf("unknown package file for %q", path)
|
||||
}
|
||||
f, err := os.Open(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return f, nil
|
||||
}
|
||||
|
||||
// doPackageCfg analyzes a single package described in a config file.
|
||||
func doPackageCfg(cfgFile string) {
|
||||
js, err := ioutil.ReadFile(cfgFile)
|
||||
if err != nil {
|
||||
errorf("%v", err)
|
||||
}
|
||||
if err := json.Unmarshal(js, &vcfg); err != nil {
|
||||
errorf("parsing vet config %s: %v", cfgFile, err)
|
||||
}
|
||||
stdImporter = &vcfg
|
||||
inittypes()
|
||||
mustTypecheck = true
|
||||
doPackage(nil, nil, nil, nil, nil)
|
||||
}
|
||||
|
||||
// doPackageDir analyzes the single package found in the directory, if there is one,
|
||||
// plus a test package, if there is one.
|
||||
func doPackageDir(directory string) {
|
||||
context := build.Default
|
||||
if len(context.BuildTags) != 0 {
|
||||
warnf("build tags %s previously set", context.BuildTags)
|
||||
}
|
||||
context.BuildTags = append(tagList, context.BuildTags...)
|
||||
|
||||
pkg, err := context.ImportDir(directory, 0)
|
||||
if err != nil {
|
||||
// If it's just that there are no go source files, that's fine.
|
||||
if _, nogo := err.(*build.NoGoError); nogo {
|
||||
return
|
||||
}
|
||||
// Non-fatal: we are doing a recursive walk and there may be other directories.
|
||||
warnf("cannot process directory %s: %s", directory, err)
|
||||
return
|
||||
}
|
||||
var names []string
|
||||
names = append(names, pkg.GoFiles...)
|
||||
names = append(names, pkg.CgoFiles...)
|
||||
names = append(names, pkg.TestGoFiles...) // These are also in the "foo" package.
|
||||
names = append(names, pkg.SFiles...)
|
||||
prefixDirectory(directory, names)
|
||||
basePkg, _ := doPackage(nil, nil, nil, nil, nil)
|
||||
// Is there also a "foo_test" package? If so, do that one as well.
|
||||
if len(pkg.XTestGoFiles) > 0 {
|
||||
names = pkg.XTestGoFiles
|
||||
prefixDirectory(directory, names)
|
||||
doPackage(basePkg, nil, nil, nil, nil)
|
||||
}
|
||||
}
|
||||
|
||||
type Package struct {
|
||||
path string
|
||||
defs map[*ast.Ident]types.Object
|
||||
uses map[*ast.Ident]types.Object
|
||||
selectors map[*ast.SelectorExpr]*types.Selection
|
||||
types map[ast.Expr]types.TypeAndValue
|
||||
spans map[types.Object]Span
|
||||
files []*File
|
||||
typesPkg *types.Package
|
||||
}
|
||||
|
||||
type astFilePathGetter func(f *ast.File, fset *token.FileSet) (string, error)
|
||||
|
||||
// doPackage analyzes the single package constructed from the named files.
|
||||
// It returns the parsed Package or nil if none of the files have been checked.
|
||||
func doPackage(basePkg *Package, pkgInfo *loader.PackageInfo, fs *token.FileSet, astFiles []*ast.File, pg astFilePathGetter) (*Package, error) {
|
||||
var files []*File
|
||||
for _, parsedFile := range astFiles {
|
||||
name, err := pg(parsedFile, fs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
data, err := ioutil.ReadFile(name)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("can't read %q: %s", name, err)
|
||||
}
|
||||
|
||||
checkBuildTag(name, data)
|
||||
|
||||
files = append(files, &File{
|
||||
fset: fs,
|
||||
content: data,
|
||||
name: name,
|
||||
file: parsedFile,
|
||||
dead: make(map[ast.Node]bool),
|
||||
})
|
||||
}
|
||||
|
||||
pkg := new(Package)
|
||||
pkg.path = astFiles[0].Name.Name
|
||||
pkg.files = files
|
||||
// Type check the package.
|
||||
errs := pkg.check(fs, astFiles, pkgInfo)
|
||||
if errs != nil {
|
||||
errors := []string{}
|
||||
for _, err := range errs {
|
||||
errors = append(errors, err.Error())
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("can't typecheck package: %s", strings.Join(errors, "|"))
|
||||
}
|
||||
|
||||
// Check.
|
||||
chk := make(map[ast.Node][]func(*File, ast.Node))
|
||||
for typ, set := range checkers {
|
||||
for name, fn := range set {
|
||||
if vet(name) {
|
||||
chk[typ] = append(chk[typ], fn)
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, file := range files {
|
||||
file.pkg = pkg
|
||||
file.basePkg = basePkg
|
||||
file.checkers = chk
|
||||
if file.file != nil {
|
||||
file.walkFile(file.name, file.file)
|
||||
}
|
||||
}
|
||||
asmCheck(pkg)
|
||||
return pkg, nil
|
||||
}
|
||||
|
||||
func visit(path string, f os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
warnf("walk error: %s", err)
|
||||
return err
|
||||
}
|
||||
// One package per directory. Ignore the files themselves.
|
||||
if !f.IsDir() {
|
||||
return nil
|
||||
}
|
||||
doPackageDir(path)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (pkg *Package) hasFileWithSuffix(suffix string) bool {
|
||||
for _, f := range pkg.files {
|
||||
if strings.HasSuffix(f.name, suffix) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// walkDir recursively walks the tree looking for Go packages.
|
||||
func walkDir(root string) {
|
||||
filepath.Walk(root, visit)
|
||||
}
|
||||
|
||||
// errorf formats the error to standard error, adding program
|
||||
// identification and a newline, and exits.
|
||||
func errorf(format string, args ...interface{}) {
|
||||
fmt.Fprintf(os.Stderr, "vet: "+format+"\n", args...)
|
||||
}
|
||||
|
||||
// warnf formats the error to standard error, adding program
|
||||
// identification and a newline, but does not exit.
|
||||
func warnf(format string, args ...interface{}) {
|
||||
fmt.Fprintf(os.Stderr, "vet: "+format+"\n", args...)
|
||||
setExit(1)
|
||||
}
|
||||
|
||||
// Println is fmt.Println guarded by -v.
|
||||
func Println(args ...interface{}) {
|
||||
if !*verbose {
|
||||
return
|
||||
}
|
||||
fmt.Println(args...)
|
||||
}
|
||||
|
||||
// Printf is fmt.Printf guarded by -v.
|
||||
func Printf(format string, args ...interface{}) {
|
||||
if !*verbose {
|
||||
return
|
||||
}
|
||||
fmt.Printf(format+"\n", args...)
|
||||
}
|
||||
|
||||
// Bad reports an error and sets the exit code..
|
||||
func (f *File) Bad(pos token.Pos, args ...interface{}) {
|
||||
foundIssues = append(foundIssues, Issue{
|
||||
Pos: f.fset.Position(pos),
|
||||
Message: fmt.Sprint(args...),
|
||||
})
|
||||
f.Warn(pos, args...)
|
||||
setExit(1)
|
||||
}
|
||||
|
||||
// Badf reports a formatted error and sets the exit code.
|
||||
func (f *File) Badf(pos token.Pos, format string, args ...interface{}) {
|
||||
foundIssues = append(foundIssues, Issue{
|
||||
Pos: f.fset.Position(pos),
|
||||
Message: fmt.Sprintf(format, args...),
|
||||
})
|
||||
f.Warnf(pos, format, args...)
|
||||
setExit(1)
|
||||
}
|
||||
|
||||
// loc returns a formatted representation of the position.
|
||||
func (f *File) loc(pos token.Pos) string {
|
||||
if pos == token.NoPos {
|
||||
return ""
|
||||
}
|
||||
// Do not print columns. Because the pos often points to the start of an
|
||||
// expression instead of the inner part with the actual error, the
|
||||
// precision can mislead.
|
||||
posn := f.fset.Position(pos)
|
||||
return fmt.Sprintf("%s:%d", f.name, posn.Line)
|
||||
}
|
||||
|
||||
// locPrefix returns a formatted representation of the position for use as a line prefix.
|
||||
func (f *File) locPrefix(pos token.Pos) string {
|
||||
if pos == token.NoPos {
|
||||
return ""
|
||||
}
|
||||
return fmt.Sprintf("%s: ", f.loc(pos))
|
||||
}
|
||||
|
||||
// Warn reports an error but does not set the exit code.
|
||||
func (f *File) Warn(pos token.Pos, args ...interface{}) {
|
||||
fmt.Fprintf(os.Stderr, "%s%s", f.locPrefix(pos), fmt.Sprintln(args...))
|
||||
}
|
||||
|
||||
// Warnf reports a formatted error but does not set the exit code.
|
||||
func (f *File) Warnf(pos token.Pos, format string, args ...interface{}) {
|
||||
fmt.Fprintf(os.Stderr, "%s%s\n", f.locPrefix(pos), fmt.Sprintf(format, args...))
|
||||
}
|
||||
|
||||
// walkFile walks the file's tree.
|
||||
func (f *File) walkFile(name string, file *ast.File) {
|
||||
Println("Checking file", name)
|
||||
ast.Walk(f, file)
|
||||
}
|
||||
|
||||
// Visit implements the ast.Visitor interface.
|
||||
func (f *File) Visit(node ast.Node) ast.Visitor {
|
||||
f.updateDead(node)
|
||||
var key ast.Node
|
||||
switch node.(type) {
|
||||
case *ast.AssignStmt:
|
||||
key = assignStmt
|
||||
case *ast.BinaryExpr:
|
||||
key = binaryExpr
|
||||
case *ast.CallExpr:
|
||||
key = callExpr
|
||||
case *ast.CompositeLit:
|
||||
key = compositeLit
|
||||
case *ast.ExprStmt:
|
||||
key = exprStmt
|
||||
case *ast.ForStmt:
|
||||
key = forStmt
|
||||
case *ast.FuncDecl:
|
||||
key = funcDecl
|
||||
case *ast.FuncLit:
|
||||
key = funcLit
|
||||
case *ast.GenDecl:
|
||||
key = genDecl
|
||||
case *ast.InterfaceType:
|
||||
key = interfaceType
|
||||
case *ast.RangeStmt:
|
||||
key = rangeStmt
|
||||
case *ast.ReturnStmt:
|
||||
key = returnStmt
|
||||
case *ast.StructType:
|
||||
key = structType
|
||||
}
|
||||
for _, fn := range f.checkers[key] {
|
||||
fn(f, node)
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
// gofmt returns a string representation of the expression.
|
||||
func (f *File) gofmt(x ast.Expr) string {
|
||||
f.b.Reset()
|
||||
printer.Fprint(&f.b, f.fset, x)
|
||||
return f.b.String()
|
||||
}
|
vendor/github.com/golangci/govet/nilfunc.go (67 lines deleted; generated, vendored)
@@ -1,67 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*
This file contains the code to check for useless function comparisons.
A useless comparison is one like f == nil as opposed to f() == nil.
*/

package govet

import (
	"go/ast"
	"go/token"
	"go/types"
)

func init() {
	register("nilfunc",
		"check for comparisons between functions and nil",
		checkNilFuncComparison,
		binaryExpr)
}

func checkNilFuncComparison(f *File, node ast.Node) {
	e := node.(*ast.BinaryExpr)

	// Only want == or != comparisons.
	if e.Op != token.EQL && e.Op != token.NEQ {
		return
	}

	// Only want comparisons with a nil identifier on one side.
	var e2 ast.Expr
	switch {
	case f.isNil(e.X):
		e2 = e.Y
	case f.isNil(e.Y):
		e2 = e.X
	default:
		return
	}

	// Only want identifiers or selector expressions.
	var obj types.Object
	switch v := e2.(type) {
	case *ast.Ident:
		obj = f.pkg.uses[v]
	case *ast.SelectorExpr:
		obj = f.pkg.uses[v.Sel]
	default:
		return
	}

	// Only want functions.
	if _, ok := obj.(*types.Func); !ok {
		return
	}

	f.Badf(e.Pos(), "comparison of function %v %v nil is always %v", obj.Name(), e.Op, e.Op == token.NEQ)
}

// isNil reports whether the provided expression is the built-in nil
// identifier.
func (f *File) isNil(e ast.Expr) bool {
	return f.pkg.types[e].Type == types.Typ[types.UntypedNil]
}
vendor/github.com/golangci/govet/print.go (807 lines deleted; generated, vendored)
@@ -1,807 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains the printf-checker.
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/constant"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
var printfuncs = flag.String("printfuncs", "", "comma-separated list of print function names to check")
|
||||
|
||||
func init() {
|
||||
register("printf",
|
||||
"check printf-like invocations",
|
||||
checkFmtPrintfCall,
|
||||
funcDecl, callExpr)
|
||||
}
|
||||
|
||||
func initPrintFlags() {
|
||||
if *printfuncs == "" {
|
||||
return
|
||||
}
|
||||
for _, name := range strings.Split(*printfuncs, ",") {
|
||||
if len(name) == 0 {
|
||||
flag.Usage()
|
||||
}
|
||||
|
||||
// Backwards compatibility: skip optional first argument
|
||||
// index after the colon.
|
||||
if colon := strings.LastIndex(name, ":"); colon > 0 {
|
||||
name = name[:colon]
|
||||
}
|
||||
|
||||
isPrint[strings.ToLower(name)] = true
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(rsc): Incorporate user-defined printf wrappers again.
|
||||
// The general plan is to allow vet of one package P to output
|
||||
// additional information to supply to later vets of packages
|
||||
// importing P. Then vet of P can record a list of printf wrappers
|
||||
// and the later vet using P.Printf will find it in the list and check it.
|
||||
// That's not ready for Go 1.10.
|
||||
// When that does happen, uncomment the user-defined printf
|
||||
// wrapper tests in testdata/print.go.
|
||||
|
||||
// isPrint records the print functions.
|
||||
// If a key ends in 'f' then it is assumed to be a formatted print.
|
||||
var isPrint = map[string]bool{
|
||||
"fmt.Errorf": true,
|
||||
"fmt.Fprint": true,
|
||||
"fmt.Fprintf": true,
|
||||
"fmt.Fprintln": true,
|
||||
"fmt.Print": true,
|
||||
"fmt.Printf": true,
|
||||
"fmt.Println": true,
|
||||
"fmt.Sprint": true,
|
||||
"fmt.Sprintf": true,
|
||||
"fmt.Sprintln": true,
|
||||
"log.Fatal": true,
|
||||
"log.Fatalf": true,
|
||||
"log.Fatalln": true,
|
||||
"log.Logger.Fatal": true,
|
||||
"log.Logger.Fatalf": true,
|
||||
"log.Logger.Fatalln": true,
|
||||
"log.Logger.Panic": true,
|
||||
"log.Logger.Panicf": true,
|
||||
"log.Logger.Panicln": true,
|
||||
"log.Logger.Printf": true,
|
||||
"log.Logger.Println": true,
|
||||
"log.Panic": true,
|
||||
"log.Panicf": true,
|
||||
"log.Panicln": true,
|
||||
"log.Print": true,
|
||||
"log.Printf": true,
|
||||
"log.Println": true,
|
||||
"testing.B.Error": true,
|
||||
"testing.B.Errorf": true,
|
||||
"testing.B.Fatal": true,
|
||||
"testing.B.Fatalf": true,
|
||||
"testing.B.Log": true,
|
||||
"testing.B.Logf": true,
|
||||
"testing.B.Skip": true,
|
||||
"testing.B.Skipf": true,
|
||||
"testing.T.Error": true,
|
||||
"testing.T.Errorf": true,
|
||||
"testing.T.Fatal": true,
|
||||
"testing.T.Fatalf": true,
|
||||
"testing.T.Log": true,
|
||||
"testing.T.Logf": true,
|
||||
"testing.T.Skip": true,
|
||||
"testing.T.Skipf": true,
|
||||
"testing.TB.Error": true,
|
||||
"testing.TB.Errorf": true,
|
||||
"testing.TB.Fatal": true,
|
||||
"testing.TB.Fatalf": true,
|
||||
"testing.TB.Log": true,
|
||||
"testing.TB.Logf": true,
|
||||
"testing.TB.Skip": true,
|
||||
"testing.TB.Skipf": true,
|
||||
}
|
||||
|
||||
// formatString returns the format string argument and its index within
|
||||
// the given printf-like call expression.
|
||||
//
|
||||
// The last parameter before variadic arguments is assumed to be
|
||||
// a format string.
|
||||
//
|
||||
// The first string literal or string constant is assumed to be a format string
|
||||
// if the call's signature cannot be determined.
|
||||
//
|
||||
// If it cannot find any format string parameter, it returns ("", -1).
|
||||
func formatString(f *File, call *ast.CallExpr) (format string, idx int) {
|
||||
typ := f.pkg.types[call.Fun].Type
|
||||
if typ != nil {
|
||||
if sig, ok := typ.(*types.Signature); ok {
|
||||
if !sig.Variadic() {
|
||||
// Skip checking non-variadic functions.
|
||||
return "", -1
|
||||
}
|
||||
idx := sig.Params().Len() - 2
|
||||
if idx < 0 {
|
||||
// Skip checking variadic functions without
|
||||
// fixed arguments.
|
||||
return "", -1
|
||||
}
|
||||
s, ok := stringConstantArg(f, call, idx)
|
||||
if !ok {
|
||||
// The last argument before variadic args isn't a string.
|
||||
return "", -1
|
||||
}
|
||||
return s, idx
|
||||
}
|
||||
}
|
||||
|
||||
// Cannot determine call's signature. Fall back to scanning for the first
|
||||
// string constant in the call.
|
||||
for idx := range call.Args {
|
||||
if s, ok := stringConstantArg(f, call, idx); ok {
|
||||
return s, idx
|
||||
}
|
||||
if f.pkg.types[call.Args[idx]].Type == types.Typ[types.String] {
|
||||
// Skip checking a call with a non-constant format
|
||||
// string argument, since its contents are unavailable
|
||||
// for validation.
|
||||
return "", -1
|
||||
}
|
||||
}
|
||||
return "", -1
|
||||
}
|
||||
|
||||
// stringConstantArg returns call's string constant argument at the index idx.
|
||||
//
|
||||
// ("", false) is returned if call's argument at the index idx isn't a string
|
||||
// constant.
|
||||
func stringConstantArg(f *File, call *ast.CallExpr, idx int) (string, bool) {
|
||||
if idx >= len(call.Args) {
|
||||
return "", false
|
||||
}
|
||||
arg := call.Args[idx]
|
||||
lit := f.pkg.types[arg].Value
|
||||
if lit != nil && lit.Kind() == constant.String {
|
||||
return constant.StringVal(lit), true
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
// checkCall triggers the print-specific checks if the call invokes a print function.
|
||||
func checkFmtPrintfCall(f *File, node ast.Node) {
|
||||
if f.pkg.typesPkg == nil {
|
||||
// This check now requires type information.
|
||||
return
|
||||
}
|
||||
|
||||
if d, ok := node.(*ast.FuncDecl); ok && isStringer(f, d) {
|
||||
// Remember we saw this.
|
||||
if f.stringerPtrs == nil {
|
||||
f.stringerPtrs = make(map[*ast.Object]bool)
|
||||
}
|
||||
if l := d.Recv.List; len(l) == 1 {
|
||||
if n := l[0].Names; len(n) == 1 {
|
||||
typ := f.pkg.types[l[0].Type]
|
||||
_, ptrRecv := typ.Type.(*types.Pointer)
|
||||
f.stringerPtrs[n[0].Obj] = ptrRecv
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
call, ok := node.(*ast.CallExpr)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
// Construct name like pkg.Printf or pkg.Type.Printf for lookup.
|
||||
var name string
|
||||
switch x := call.Fun.(type) {
|
||||
case *ast.Ident:
|
||||
if fn, ok := f.pkg.uses[x].(*types.Func); ok {
|
||||
var pkg string
|
||||
if fn.Pkg() == nil || fn.Pkg() == f.pkg.typesPkg {
|
||||
pkg = vcfg.ImportPath
|
||||
} else {
|
||||
pkg = fn.Pkg().Path()
|
||||
}
|
||||
name = pkg + "." + x.Name
|
||||
break
|
||||
}
|
||||
|
||||
case *ast.SelectorExpr:
|
||||
// Check for "fmt.Printf".
|
||||
if id, ok := x.X.(*ast.Ident); ok {
|
||||
if pkgName, ok := f.pkg.uses[id].(*types.PkgName); ok {
|
||||
name = pkgName.Imported().Path() + "." + x.Sel.Name
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Check for t.Logf where t is a *testing.T.
|
||||
if sel := f.pkg.selectors[x]; sel != nil {
|
||||
recv := sel.Recv()
|
||||
if p, ok := recv.(*types.Pointer); ok {
|
||||
recv = p.Elem()
|
||||
}
|
||||
if named, ok := recv.(*types.Named); ok {
|
||||
obj := named.Obj()
|
||||
var pkg string
|
||||
if obj.Pkg() == nil || obj.Pkg() == f.pkg.typesPkg {
|
||||
pkg = vcfg.ImportPath
|
||||
} else {
|
||||
pkg = obj.Pkg().Path()
|
||||
}
|
||||
name = pkg + "." + obj.Name() + "." + x.Sel.Name
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if name == "" {
|
||||
return
|
||||
}
|
||||
|
||||
shortName := name[strings.LastIndex(name, ".")+1:]
|
||||
|
||||
_, ok = isPrint[name]
|
||||
if !ok {
|
||||
// Next look up just "printf", for use with -printfuncs.
|
||||
_, ok = isPrint[strings.ToLower(shortName)]
|
||||
}
|
||||
if ok {
|
||||
if strings.HasSuffix(name, "f") {
|
||||
f.checkPrintf(call, shortName)
|
||||
} else {
|
||||
f.checkPrint(call, shortName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// isStringer returns true if the provided declaration is a "String() string"
|
||||
// method, an implementation of fmt.Stringer.
|
||||
func isStringer(f *File, d *ast.FuncDecl) bool {
|
||||
return d.Recv != nil && d.Name.Name == "String" && d.Type.Results != nil &&
|
||||
len(d.Type.Params.List) == 0 && len(d.Type.Results.List) == 1 &&
|
||||
f.pkg.types[d.Type.Results.List[0].Type].Type == types.Typ[types.String]
|
||||
}
|
||||
|
||||
// isFormatter reports whether t satisfies fmt.Formatter.
|
||||
// Unlike fmt.Stringer, it's impossible to satisfy fmt.Formatter without importing fmt.
|
||||
func (f *File) isFormatter(t types.Type) bool {
|
||||
return formatterType != nil && types.Implements(t, formatterType)
|
||||
}
|
||||
|
||||
// formatState holds the parsed representation of a printf directive such as "%3.*[4]d".
|
||||
// It is constructed by parsePrintfVerb.
|
||||
type formatState struct {
|
||||
verb rune // the format verb: 'd' for "%d"
|
||||
format string // the full format directive from % through verb, "%.3d".
|
||||
name string // Printf, Sprintf etc.
|
||||
flags []byte // the list of # + etc.
|
||||
argNums []int // the successive argument numbers that are consumed, adjusted to refer to actual arg in call
|
||||
firstArg int // Index of first argument after the format in the Printf call.
|
||||
// Used only during parse.
|
||||
file *File
|
||||
call *ast.CallExpr
|
||||
argNum int // Which argument we're expecting to format now.
|
||||
hasIndex bool // Whether the argument is indexed.
|
||||
indexPending bool // Whether we have an indexed argument that has not resolved.
|
||||
nbytes int // number of bytes of the format string consumed.
|
||||
}
|
||||
|
||||
// checkPrintf checks a call to a formatted print routine such as Printf.
|
||||
func (f *File) checkPrintf(call *ast.CallExpr, name string) {
|
||||
format, idx := formatString(f, call)
|
||||
if idx < 0 {
|
||||
if *verbose {
|
||||
f.Warn(call.Pos(), "can't check non-constant format in call to", name)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
firstArg := idx + 1 // Arguments are immediately after format string.
|
||||
if !strings.Contains(format, "%") {
|
||||
if len(call.Args) > firstArg {
|
||||
f.Badf(call.Pos(), "%s call has arguments but no formatting directives", name)
|
||||
}
|
||||
return
|
||||
}
|
||||
// Hard part: check formats against args.
|
||||
argNum := firstArg
|
||||
maxArgNum := firstArg
|
||||
anyIndex := false
|
||||
for i, w := 0, 0; i < len(format); i += w {
|
||||
w = 1
|
||||
if format[i] != '%' {
|
||||
continue
|
||||
}
|
||||
state := f.parsePrintfVerb(call, name, format[i:], firstArg, argNum)
|
||||
if state == nil {
|
||||
return
|
||||
}
|
||||
w = len(state.format)
|
||||
if !f.okPrintfArg(call, state) { // One error per format is enough.
|
||||
return
|
||||
}
|
||||
if state.hasIndex {
|
||||
anyIndex = true
|
||||
}
|
||||
if len(state.argNums) > 0 {
|
||||
// Continue with the next sequential argument.
|
||||
argNum = state.argNums[len(state.argNums)-1] + 1
|
||||
}
|
||||
for _, n := range state.argNums {
|
||||
if n >= maxArgNum {
|
||||
maxArgNum = n + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
// Dotdotdot is hard.
|
||||
if call.Ellipsis.IsValid() && maxArgNum >= len(call.Args)-1 {
|
||||
return
|
||||
}
|
||||
// If any formats are indexed, extra arguments are ignored.
|
||||
if anyIndex {
|
||||
return
|
||||
}
|
||||
// There should be no leftover arguments.
|
||||
if maxArgNum != len(call.Args) {
|
||||
expect := maxArgNum - firstArg
|
||||
numArgs := len(call.Args) - firstArg
|
||||
f.Badf(call.Pos(), "%s call needs %v but has %v", name, count(expect, "arg"), count(numArgs, "arg"))
|
||||
}
|
||||
}
|
||||
|
||||
// parseFlags accepts any printf flags.
|
||||
func (s *formatState) parseFlags() {
|
||||
for s.nbytes < len(s.format) {
|
||||
switch c := s.format[s.nbytes]; c {
|
||||
case '#', '0', '+', '-', ' ':
|
||||
s.flags = append(s.flags, c)
|
||||
s.nbytes++
|
||||
default:
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// scanNum advances through a decimal number if present.
|
||||
func (s *formatState) scanNum() {
|
||||
for ; s.nbytes < len(s.format); s.nbytes++ {
|
||||
c := s.format[s.nbytes]
|
||||
if c < '0' || '9' < c {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parseIndex scans an index expression. It returns false if there is a syntax error.
|
||||
func (s *formatState) parseIndex() bool {
|
||||
if s.nbytes == len(s.format) || s.format[s.nbytes] != '[' {
|
||||
return true
|
||||
}
|
||||
// Argument index present.
|
||||
s.nbytes++ // skip '['
|
||||
start := s.nbytes
|
||||
s.scanNum()
|
||||
ok := true
|
||||
if s.nbytes == len(s.format) || s.nbytes == start || s.format[s.nbytes] != ']' {
|
||||
ok = false
|
||||
s.nbytes = strings.Index(s.format, "]")
|
||||
if s.nbytes < 0 {
|
||||
s.file.Badf(s.call.Pos(), "%s format %s is missing closing ]", s.name, s.format)
|
||||
return false
|
||||
}
|
||||
}
|
||||
arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32)
|
||||
if err != nil || !ok || arg32 <= 0 || arg32 > int64(len(s.call.Args)-s.firstArg) {
|
||||
s.file.Badf(s.call.Pos(), "%s format has invalid argument index [%s]", s.name, s.format[start:s.nbytes])
|
||||
return false
|
||||
}
|
||||
s.nbytes++ // skip ']'
|
||||
arg := int(arg32)
|
||||
arg += s.firstArg - 1 // We want to zero-index the actual arguments.
|
||||
s.argNum = arg
|
||||
s.hasIndex = true
|
||||
s.indexPending = true
|
||||
return true
|
||||
}
|
||||
|
||||
// parseNum scans a width or precision (or *). It returns false if there's a bad index expression.
|
||||
func (s *formatState) parseNum() bool {
|
||||
if s.nbytes < len(s.format) && s.format[s.nbytes] == '*' {
|
||||
if s.indexPending { // Absorb it.
|
||||
s.indexPending = false
|
||||
}
|
||||
s.nbytes++
|
||||
s.argNums = append(s.argNums, s.argNum)
|
||||
s.argNum++
|
||||
} else {
|
||||
s.scanNum()
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// parsePrecision scans for a precision. It returns false if there's a bad index expression.
|
||||
func (s *formatState) parsePrecision() bool {
|
||||
// If there's a period, there may be a precision.
|
||||
if s.nbytes < len(s.format) && s.format[s.nbytes] == '.' {
|
||||
s.flags = append(s.flags, '.') // Treat precision as a flag.
|
||||
s.nbytes++
|
||||
if !s.parseIndex() {
|
||||
return false
|
||||
}
|
||||
if !s.parseNum() {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// parsePrintfVerb looks the formatting directive that begins the format string
|
||||
// and returns a formatState that encodes what the directive wants, without looking
|
||||
// at the actual arguments present in the call. The result is nil if there is an error.
|
||||
func (f *File) parsePrintfVerb(call *ast.CallExpr, name, format string, firstArg, argNum int) *formatState {
|
||||
state := &formatState{
|
||||
format: format,
|
||||
name: name,
|
||||
flags: make([]byte, 0, 5),
|
||||
argNum: argNum,
|
||||
argNums: make([]int, 0, 1),
|
||||
nbytes: 1, // There's guaranteed to be a percent sign.
|
||||
firstArg: firstArg,
|
||||
file: f,
|
||||
call: call,
|
||||
}
|
||||
// There may be flags.
|
||||
state.parseFlags()
|
||||
// There may be an index.
|
||||
if !state.parseIndex() {
|
||||
return nil
|
||||
}
|
||||
// There may be a width.
|
||||
if !state.parseNum() {
|
||||
return nil
|
||||
}
|
||||
// There may be a precision.
|
||||
if !state.parsePrecision() {
|
||||
return nil
|
||||
}
|
||||
// Now a verb, possibly prefixed by an index (which we may already have).
|
||||
if !state.indexPending && !state.parseIndex() {
|
||||
return nil
|
||||
}
|
||||
if state.nbytes == len(state.format) {
|
||||
f.Badf(call.Pos(), "%s format %s is missing verb at end of string", name, state.format)
|
||||
return nil
|
||||
}
|
||||
verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:])
|
||||
state.verb = verb
|
||||
state.nbytes += w
|
||||
if verb != '%' {
|
||||
state.argNums = append(state.argNums, state.argNum)
|
||||
}
|
||||
state.format = state.format[:state.nbytes]
|
||||
return state
|
||||
}
|
||||
|
||||
// printfArgType encodes the types of expressions a printf verb accepts. It is a bitmask.
|
||||
type printfArgType int
|
||||
|
||||
const (
|
||||
argBool printfArgType = 1 << iota
|
||||
argInt
|
||||
argRune
|
||||
argString
|
||||
argFloat
|
||||
argComplex
|
||||
argPointer
|
||||
anyType printfArgType = ^0
|
||||
)
|
||||
|
||||
type printVerb struct {
|
||||
verb rune // User may provide verb through Formatter; could be a rune.
|
||||
flags string // known flags are all ASCII
|
||||
typ printfArgType
|
||||
}
|
||||
|
||||
// Common flag sets for printf verbs.
|
||||
const (
|
||||
noFlag = ""
|
||||
numFlag = " -+.0"
|
||||
sharpNumFlag = " -+.0#"
|
||||
allFlags = " -+.0#"
|
||||
)
|
||||
|
||||
// printVerbs identifies which flags are known to printf for each verb.
|
||||
var printVerbs = []printVerb{
|
||||
// '-' is a width modifier, always valid.
|
||||
// '.' is a precision for float, max width for strings.
|
||||
// '+' is required sign for numbers, Go format for %v.
|
||||
// '#' is alternate format for several verbs.
|
||||
// ' ' is spacer for numbers
|
||||
{'%', noFlag, 0},
|
||||
{'b', numFlag, argInt | argFloat | argComplex},
|
||||
{'c', "-", argRune | argInt},
|
||||
{'d', numFlag, argInt | argPointer},
|
||||
{'e', sharpNumFlag, argFloat | argComplex},
|
||||
{'E', sharpNumFlag, argFloat | argComplex},
|
||||
{'f', sharpNumFlag, argFloat | argComplex},
|
||||
{'F', sharpNumFlag, argFloat | argComplex},
|
||||
{'g', sharpNumFlag, argFloat | argComplex},
|
||||
{'G', sharpNumFlag, argFloat | argComplex},
|
||||
{'o', sharpNumFlag, argInt},
|
||||
{'p', "-#", argPointer},
|
||||
{'q', " -+.0#", argRune | argInt | argString},
|
||||
{'s', " -+.0", argString},
|
||||
{'t', "-", argBool},
|
||||
{'T', "-", anyType},
|
||||
{'U', "-#", argRune | argInt},
|
||||
{'v', allFlags, anyType},
|
||||
{'x', sharpNumFlag, argRune | argInt | argString | argPointer},
|
||||
{'X', sharpNumFlag, argRune | argInt | argString | argPointer},
|
||||
}
|
||||
|
||||
// okPrintfArg compares the formatState to the arguments actually present,
|
||||
// reporting any discrepancies it can discern. If the final argument is ellipsissed,
|
||||
// there's little it can do for that.
|
||||
func (f *File) okPrintfArg(call *ast.CallExpr, state *formatState) (ok bool) {
|
||||
var v printVerb
|
||||
found := false
|
||||
// Linear scan is fast enough for a small list.
|
||||
for _, v = range printVerbs {
|
||||
if v.verb == state.verb {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Does current arg implement fmt.Formatter?
|
||||
formatter := false
|
||||
if state.argNum < len(call.Args) {
|
||||
if tv, ok := f.pkg.types[call.Args[state.argNum]]; ok {
|
||||
formatter = f.isFormatter(tv.Type)
|
||||
}
|
||||
}
|
||||
|
||||
if !formatter {
|
||||
if !found {
|
||||
f.Badf(call.Pos(), "%s format %s has unknown verb %c", state.name, state.format, state.verb)
|
||||
return false
|
||||
}
|
||||
for _, flag := range state.flags {
|
||||
// TODO: Disable complaint about '0' for Go 1.10. To be fixed properly in 1.11.
|
||||
// See issues 23598 and 23605.
|
||||
if flag == '0' {
|
||||
continue
|
||||
}
|
||||
if !strings.ContainsRune(v.flags, rune(flag)) {
|
||||
f.Badf(call.Pos(), "%s format %s has unrecognized flag %c", state.name, state.format, flag)
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
// Verb is good. If len(state.argNums)>trueArgs, we have something like %.*s and all
|
||||
// but the final arg must be an integer.
|
||||
trueArgs := 1
|
||||
if state.verb == '%' {
|
||||
trueArgs = 0
|
||||
}
|
||||
nargs := len(state.argNums)
|
||||
for i := 0; i < nargs-trueArgs; i++ {
|
||||
argNum := state.argNums[i]
|
||||
if !f.argCanBeChecked(call, i, state) {
|
||||
return
|
||||
}
|
||||
arg := call.Args[argNum]
|
||||
if !f.matchArgType(argInt, nil, arg) {
|
||||
f.Badf(call.Pos(), "%s format %s uses non-int %s as argument of *", state.name, state.format, f.gofmt(arg))
|
||||
return false
|
||||
}
|
||||
}
|
||||
if state.verb == '%' || formatter {
|
||||
return true
|
||||
}
|
||||
argNum := state.argNums[len(state.argNums)-1]
|
||||
if !f.argCanBeChecked(call, len(state.argNums)-1, state) {
|
||||
return false
|
||||
}
|
||||
arg := call.Args[argNum]
|
||||
if f.isFunctionValue(arg) && state.verb != 'p' && state.verb != 'T' {
|
||||
f.Badf(call.Pos(), "%s format %s arg %s is a func value, not called", state.name, state.format, f.gofmt(arg))
|
||||
return false
|
||||
}
|
||||
if !f.matchArgType(v.typ, nil, arg) {
|
||||
typeString := ""
|
||||
if typ := f.pkg.types[arg].Type; typ != nil {
|
||||
typeString = typ.String()
|
||||
}
|
||||
f.Badf(call.Pos(), "%s format %s has arg %s of wrong type %s", state.name, state.format, f.gofmt(arg), typeString)
|
||||
return false
|
||||
}
|
||||
if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) && f.recursiveStringer(arg) {
|
||||
f.Badf(call.Pos(), "%s format %s with arg %s causes recursive String method call", state.name, state.format, f.gofmt(arg))
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// recursiveStringer reports whether the provided argument is r or &r for the
|
||||
// fmt.Stringer receiver identifier r.
|
||||
func (f *File) recursiveStringer(e ast.Expr) bool {
|
||||
if len(f.stringerPtrs) == 0 {
|
||||
return false
|
||||
}
|
||||
ptr := false
|
||||
var obj *ast.Object
|
||||
switch e := e.(type) {
|
||||
case *ast.Ident:
|
||||
obj = e.Obj
|
||||
case *ast.UnaryExpr:
|
||||
if id, ok := e.X.(*ast.Ident); ok && e.Op == token.AND {
|
||||
obj = id.Obj
|
||||
ptr = true
|
||||
}
|
||||
}
|
||||
|
||||
// It's unlikely to be a recursive stringer if it has a Format method.
|
||||
if typ := f.pkg.types[e].Type; typ != nil {
|
||||
if f.isFormatter(typ) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// We compare the underlying Object, which checks that the identifier
|
||||
// is the one we declared as the receiver for the String method in
|
||||
// which this printf appears.
|
||||
ptrRecv, exist := f.stringerPtrs[obj]
|
||||
if !exist {
|
||||
return false
|
||||
}
|
||||
// We also need to check that using &t when we declared String
|
||||
// on (t *T) is ok; in such a case, the address is printed.
|
||||
if ptr && ptrRecv {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// isFunctionValue reports whether the expression is a function as opposed to a function call.
|
||||
// It is almost always a mistake to print a function value.
|
||||
func (f *File) isFunctionValue(e ast.Expr) bool {
|
||||
if typ := f.pkg.types[e].Type; typ != nil {
|
||||
_, ok := typ.(*types.Signature)
|
||||
return ok
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// argCanBeChecked reports whether the specified argument is statically present;
|
||||
// it may be beyond the list of arguments or in a terminal slice... argument, which
|
||||
// means we can't see it.
|
||||
func (f *File) argCanBeChecked(call *ast.CallExpr, formatArg int, state *formatState) bool {
|
||||
argNum := state.argNums[formatArg]
|
||||
if argNum <= 0 {
|
||||
// Shouldn't happen, so catch it with prejudice.
|
||||
panic("negative arg num")
|
||||
}
|
||||
if argNum < len(call.Args)-1 {
|
||||
return true // Always OK.
|
||||
}
|
||||
if call.Ellipsis.IsValid() {
|
||||
return false // We just can't tell; there could be many more arguments.
|
||||
}
|
||||
if argNum < len(call.Args) {
|
||||
return true
|
||||
}
|
||||
// There are bad indexes in the format or there are fewer arguments than the format needs.
|
||||
// This is the argument number relative to the format: Printf("%s", "hi") will give 1 for the "hi".
|
||||
arg := argNum - state.firstArg + 1 // People think of arguments as 1-indexed.
|
||||
f.Badf(call.Pos(), "%s format %s reads arg #%d, but call has %v", state.name, state.format, arg, count(len(call.Args)-state.firstArg, "arg"))
|
||||
return false
|
||||
}
|
||||
|
||||
// printFormatRE is the regexp we match and report as a possible format string
|
||||
// in the first argument to unformatted prints like fmt.Print.
|
||||
// We exclude the space flag, so that printing a string like "x % y" is not reported as a format.
|
||||
var printFormatRE = regexp.MustCompile(`%` + flagsRE + numOptRE + `\.?` + numOptRE + indexOptRE + verbRE)
|
||||
|
||||
const (
|
||||
flagsRE = `[+\-#]*`
|
||||
indexOptRE = `(\[[0-9]+\])?`
|
||||
numOptRE = `([0-9]+|` + indexOptRE + `\*)?`
|
||||
verbRE = `[bcdefgopqstvxEFGTUX]`
|
||||
)
|
||||
|
||||
// checkPrint checks a call to an unformatted print routine such as Println.
|
||||
func (f *File) checkPrint(call *ast.CallExpr, name string) {
|
||||
firstArg := 0
|
||||
typ := f.pkg.types[call.Fun].Type
|
||||
if typ == nil {
|
||||
// Skip checking functions with unknown type.
|
||||
return
|
||||
}
|
||||
if sig, ok := typ.(*types.Signature); ok {
|
||||
if !sig.Variadic() {
|
||||
// Skip checking non-variadic functions.
|
||||
return
|
||||
}
|
||||
params := sig.Params()
|
||||
firstArg = params.Len() - 1
|
||||
|
||||
typ := params.At(firstArg).Type()
|
||||
typ = typ.(*types.Slice).Elem()
|
||||
it, ok := typ.(*types.Interface)
|
||||
if !ok || !it.Empty() {
|
||||
// Skip variadic functions accepting non-interface{} args.
|
||||
return
|
||||
}
|
||||
}
|
||||
args := call.Args
|
||||
if len(args) <= firstArg {
|
||||
// Skip calls without variadic args.
|
||||
return
|
||||
}
|
||||
args = args[firstArg:]
|
||||
|
||||
if firstArg == 0 {
|
||||
if sel, ok := call.Args[0].(*ast.SelectorExpr); ok {
|
||||
if x, ok := sel.X.(*ast.Ident); ok {
|
||||
if x.Name == "os" && strings.HasPrefix(sel.Sel.Name, "Std") {
|
||||
f.Badf(call.Pos(), "%s does not take io.Writer but has first arg %s", name, f.gofmt(call.Args[0]))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
arg := args[0]
|
||||
if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
|
||||
// Ignore trailing % character in lit.Value.
|
||||
// The % in "abc 0.0%" couldn't be a formatting directive.
|
||||
s := strings.TrimSuffix(lit.Value, `%"`)
|
||||
if strings.Contains(s, "%") {
|
||||
m := printFormatRE.FindStringSubmatch(s)
|
||||
if m != nil {
|
||||
f.Badf(call.Pos(), "%s call has possible formatting directive %s", name, m[0])
|
||||
}
|
||||
}
|
||||
}
|
||||
if strings.HasSuffix(name, "ln") {
|
||||
// The last item, if a string, should not have a newline.
|
||||
arg = args[len(args)-1]
|
||||
if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
|
||||
str, _ := strconv.Unquote(lit.Value)
|
||||
if strings.HasSuffix(str, "\n") {
|
||||
f.Badf(call.Pos(), "%s arg list ends with redundant newline", name)
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, arg := range args {
|
||||
if f.isFunctionValue(arg) {
|
||||
f.Badf(call.Pos(), "%s arg %s is a func value, not called", name, f.gofmt(arg))
|
||||
}
|
||||
if f.recursiveStringer(arg) {
|
||||
f.Badf(call.Pos(), "%s arg %s causes recursive call to String method", name, f.gofmt(arg))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// count(n, what) returns "1 what" or "N whats"
|
||||
// (assuming the plural of what is whats).
|
||||
func count(n int, what string) string {
|
||||
if n == 1 {
|
||||
return "1 " + what
|
||||
}
|
||||
return fmt.Sprintf("%d %ss", n, what)
|
||||
}
|
105
vendor/github.com/golangci/govet/rangeloop.go
generated
vendored
105
vendor/github.com/golangci/govet/rangeloop.go
generated
vendored
|
@ -1,105 +0,0 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
This file contains the code to check range loop variables bound inside function
|
||||
literals that are deferred or launched in new goroutines. We only check
|
||||
instances where the defer or go statement is the last statement in the loop
|
||||
body, as otherwise we would need whole program analysis.
|
||||
|
||||
For example:
|
||||
|
||||
for i, v := range s {
|
||||
go func() {
|
||||
println(i, v) // not what you might expect
|
||||
}()
|
||||
}
|
||||
|
||||
See: https://golang.org/doc/go_faq.html#closures_and_goroutines
|
||||
*/
|
||||
|
||||
package govet
|
||||
|
||||
import "go/ast"
|
||||
|
||||
func init() {
|
||||
register("rangeloops",
|
||||
"check that loop variables are used correctly",
|
||||
checkLoop,
|
||||
rangeStmt, forStmt)
|
||||
}
|
||||
|
||||
// checkLoop walks the body of the provided loop statement, checking whether
|
||||
// its index or value variables are used unsafely inside goroutines or deferred
|
||||
// function literals.
|
||||
func checkLoop(f *File, node ast.Node) {
|
||||
// Find the variables updated by the loop statement.
|
||||
var vars []*ast.Ident
|
||||
addVar := func(expr ast.Expr) {
|
||||
if id, ok := expr.(*ast.Ident); ok {
|
||||
vars = append(vars, id)
|
||||
}
|
||||
}
|
||||
var body *ast.BlockStmt
|
||||
switch n := node.(type) {
|
||||
case *ast.RangeStmt:
|
||||
body = n.Body
|
||||
addVar(n.Key)
|
||||
addVar(n.Value)
|
||||
case *ast.ForStmt:
|
||||
body = n.Body
|
||||
switch post := n.Post.(type) {
|
||||
case *ast.AssignStmt:
|
||||
// e.g. for p = head; p != nil; p = p.next
|
||||
for _, lhs := range post.Lhs {
|
||||
addVar(lhs)
|
||||
}
|
||||
case *ast.IncDecStmt:
|
||||
// e.g. for i := 0; i < n; i++
|
||||
addVar(post.X)
|
||||
}
|
||||
}
|
||||
if vars == nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Inspect a go or defer statement
|
||||
// if it's the last one in the loop body.
|
||||
// (We give up if there are following statements,
|
||||
// because it's hard to prove go isn't followed by wait,
|
||||
// or defer by return.)
|
||||
if len(body.List) == 0 {
|
||||
return
|
||||
}
|
||||
var last *ast.CallExpr
|
||||
switch s := body.List[len(body.List)-1].(type) {
|
||||
case *ast.GoStmt:
|
||||
last = s.Call
|
||||
case *ast.DeferStmt:
|
||||
last = s.Call
|
||||
default:
|
||||
return
|
||||
}
|
||||
lit, ok := last.Fun.(*ast.FuncLit)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
ast.Inspect(lit.Body, func(n ast.Node) bool {
|
||||
id, ok := n.(*ast.Ident)
|
||||
if !ok || id.Obj == nil {
|
||||
return true
|
||||
}
|
||||
if f.pkg.types[id].Type == nil {
|
||||
// Not referring to a variable (e.g. struct field name)
|
||||
return true
|
||||
}
|
||||
for _, v := range vars {
|
||||
if v.Obj == id.Obj {
|
||||
f.Badf(id.Pos(), "loop variable %s captured by func literal",
|
||||
id.Name)
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
98
vendor/github.com/golangci/govet/shift.go
generated
vendored
98
vendor/github.com/golangci/govet/shift.go
generated
vendored
|
@ -1,98 +0,0 @@
|
|||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
This file contains the code to check for suspicious shifts.
|
||||
*/
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/constant"
|
||||
"go/token"
|
||||
"go/types"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("shift",
|
||||
"check for useless shifts",
|
||||
checkShift,
|
||||
binaryExpr, assignStmt)
|
||||
}
|
||||
|
||||
func checkShift(f *File, node ast.Node) {
|
||||
if f.dead[node] {
|
||||
// Skip shift checks on unreachable nodes.
|
||||
return
|
||||
}
|
||||
|
||||
switch node := node.(type) {
|
||||
case *ast.BinaryExpr:
|
||||
if node.Op == token.SHL || node.Op == token.SHR {
|
||||
checkLongShift(f, node, node.X, node.Y)
|
||||
}
|
||||
case *ast.AssignStmt:
|
||||
if len(node.Lhs) != 1 || len(node.Rhs) != 1 {
|
||||
return
|
||||
}
|
||||
if node.Tok == token.SHL_ASSIGN || node.Tok == token.SHR_ASSIGN {
|
||||
checkLongShift(f, node, node.Lhs[0], node.Rhs[0])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkLongShift checks if shift or shift-assign operations shift by more than
|
||||
// the length of the underlying variable.
|
||||
func checkLongShift(f *File, node ast.Node, x, y ast.Expr) {
|
||||
if f.pkg.types[x].Value != nil {
|
||||
// Ignore shifts of constants.
|
||||
// These are frequently used for bit-twiddling tricks
|
||||
// like ^uint(0) >> 63 for 32/64 bit detection and compatibility.
|
||||
return
|
||||
}
|
||||
|
||||
v := f.pkg.types[y].Value
|
||||
if v == nil {
|
||||
return
|
||||
}
|
||||
amt, ok := constant.Int64Val(v)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
t := f.pkg.types[x].Type
|
||||
if t == nil {
|
||||
return
|
||||
}
|
||||
b, ok := t.Underlying().(*types.Basic)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
var size int64
|
||||
switch b.Kind() {
|
||||
case types.Uint8, types.Int8:
|
||||
size = 8
|
||||
case types.Uint16, types.Int16:
|
||||
size = 16
|
||||
case types.Uint32, types.Int32:
|
||||
size = 32
|
||||
case types.Uint64, types.Int64:
|
||||
size = 64
|
||||
case types.Int, types.Uint:
|
||||
size = uintBitSize
|
||||
case types.Uintptr:
|
||||
size = uintptrBitSize
|
||||
default:
|
||||
return
|
||||
}
|
||||
if amt >= size {
|
||||
ident := f.gofmt(x)
|
||||
f.Badf(node.Pos(), "%s (%d bits) too small for shift of %d", ident, size, amt)
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
uintBitSize = 8 * archSizes.Sizeof(types.Typ[types.Uint])
|
||||
uintptrBitSize = 8 * archSizes.Sizeof(types.Typ[types.Uintptr])
|
||||
)
|
226
vendor/github.com/golangci/govet/structtag.go
generated
vendored
226
vendor/github.com/golangci/govet/structtag.go
generated
vendored
|
@ -1,226 +0,0 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains the test for canonical struct tags.
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("structtags",
|
||||
"check that struct field tags have canonical format and apply to exported fields as needed",
|
||||
checkStructFieldTags,
|
||||
structType)
|
||||
}
|
||||
|
||||
// checkStructFieldTags checks all the field tags of a struct, including checking for duplicates.
|
||||
func checkStructFieldTags(f *File, node ast.Node) {
|
||||
var seen map[[2]string]token.Pos
|
||||
for _, field := range node.(*ast.StructType).Fields.List {
|
||||
checkCanonicalFieldTag(f, field, &seen)
|
||||
}
|
||||
}
|
||||
|
||||
var checkTagDups = []string{"json", "xml"}
|
||||
var checkTagSpaces = map[string]bool{"json": true, "xml": true, "asn1": true}
|
||||
|
||||
// checkCanonicalFieldTag checks a single struct field tag.
|
||||
func checkCanonicalFieldTag(f *File, field *ast.Field, seen *map[[2]string]token.Pos) {
|
||||
if field.Tag == nil {
|
||||
return
|
||||
}
|
||||
|
||||
tag, err := strconv.Unquote(field.Tag.Value)
|
||||
if err != nil {
|
||||
f.Badf(field.Pos(), "unable to read struct tag %s", field.Tag.Value)
|
||||
return
|
||||
}
|
||||
|
||||
if err := validateStructTag(tag); err != nil {
|
||||
raw, _ := strconv.Unquote(field.Tag.Value) // field.Tag.Value is known to be a quoted string
|
||||
f.Badf(field.Pos(), "struct field tag %#q not compatible with reflect.StructTag.Get: %s", raw, err)
|
||||
}
|
||||
|
||||
for _, key := range checkTagDups {
|
||||
val := reflect.StructTag(tag).Get(key)
|
||||
if val == "" || val == "-" || val[0] == ',' {
|
||||
continue
|
||||
}
|
||||
if key == "xml" && len(field.Names) > 0 && field.Names[0].Name == "XMLName" {
|
||||
// XMLName defines the XML element name of the struct being
|
||||
// checked. That name cannot collide with element or attribute
|
||||
// names defined on other fields of the struct. Vet does not have a
|
||||
// check for untagged fields of type struct defining their own name
|
||||
// by containing a field named XMLName; see issue 18256.
|
||||
continue
|
||||
}
|
||||
if i := strings.Index(val, ","); i >= 0 {
|
||||
if key == "xml" {
|
||||
// Use a separate namespace for XML attributes.
|
||||
for _, opt := range strings.Split(val[i:], ",") {
|
||||
if opt == "attr" {
|
||||
key += " attribute" // Key is part of the error message.
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
val = val[:i]
|
||||
}
|
||||
if *seen == nil {
|
||||
*seen = map[[2]string]token.Pos{}
|
||||
}
|
||||
if pos, ok := (*seen)[[2]string{key, val}]; ok {
|
||||
var name string
|
||||
if len(field.Names) > 0 {
|
||||
name = field.Names[0].Name
|
||||
} else {
|
||||
name = field.Type.(*ast.Ident).Name
|
||||
}
|
||||
f.Badf(field.Pos(), "struct field %s repeats %s tag %q also at %s", name, key, val, f.loc(pos))
|
||||
} else {
|
||||
(*seen)[[2]string{key, val}] = field.Pos()
|
||||
}
|
||||
}
|
||||
|
||||
// Check for use of json or xml tags with unexported fields.
|
||||
|
||||
// Embedded struct. Nothing to do for now, but that
|
||||
// may change, depending on what happens with issue 7363.
|
||||
if len(field.Names) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
if field.Names[0].IsExported() {
|
||||
return
|
||||
}
|
||||
|
||||
for _, enc := range [...]string{"json", "xml"} {
|
||||
if reflect.StructTag(tag).Get(enc) != "" {
|
||||
f.Badf(field.Pos(), "struct field %s has %s tag but is not exported", field.Names[0].Name, enc)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
errTagSyntax = errors.New("bad syntax for struct tag pair")
|
||||
errTagKeySyntax = errors.New("bad syntax for struct tag key")
|
||||
errTagValueSyntax = errors.New("bad syntax for struct tag value")
|
||||
errTagValueSpace = errors.New("suspicious space in struct tag value")
|
||||
errTagSpace = errors.New("key:\"value\" pairs not separated by spaces")
|
||||
)
|
||||
|
||||
// validateStructTag parses the struct tag and returns an error if it is not
|
||||
// in the canonical format, which is a space-separated list of key:"value"
|
||||
// settings. The value may contain spaces.
|
||||
func validateStructTag(tag string) error {
|
||||
// This code is based on the StructTag.Get code in package reflect.
|
||||
|
||||
n := 0
|
||||
for ; tag != ""; n++ {
|
||||
if n > 0 && tag != "" && tag[0] != ' ' {
|
||||
// More restrictive than reflect, but catches likely mistakes
|
||||
// like `x:"foo",y:"bar"`, which parses as `x:"foo" ,y:"bar"` with second key ",y".
|
||||
return errTagSpace
|
||||
}
|
||||
// Skip leading space.
|
||||
i := 0
|
||||
for i < len(tag) && tag[i] == ' ' {
|
||||
i++
|
||||
}
|
||||
tag = tag[i:]
|
||||
if tag == "" {
|
||||
break
|
||||
}
|
||||
|
||||
// Scan to colon. A space, a quote or a control character is a syntax error.
|
||||
// Strictly speaking, control chars include the range [0x7f, 0x9f], not just
|
||||
// [0x00, 0x1f], but in practice, we ignore the multi-byte control characters
|
||||
// as it is simpler to inspect the tag's bytes than the tag's runes.
|
||||
i = 0
|
||||
for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f {
|
||||
i++
|
||||
}
|
||||
if i == 0 {
|
||||
return errTagKeySyntax
|
||||
}
|
||||
if i+1 >= len(tag) || tag[i] != ':' {
|
||||
return errTagSyntax
|
||||
}
|
||||
if tag[i+1] != '"' {
|
||||
return errTagValueSyntax
|
||||
}
|
||||
key := tag[:i]
|
||||
tag = tag[i+1:]
|
||||
|
||||
// Scan quoted string to find value.
|
||||
i = 1
|
||||
for i < len(tag) && tag[i] != '"' {
|
||||
if tag[i] == '\\' {
|
||||
i++
|
||||
}
|
||||
i++
|
||||
}
|
||||
if i >= len(tag) {
|
||||
return errTagValueSyntax
|
||||
}
|
||||
qvalue := tag[:i+1]
|
||||
tag = tag[i+1:]
|
||||
|
||||
value, err := strconv.Unquote(qvalue)
|
||||
if err != nil {
|
||||
return errTagValueSyntax
|
||||
}
|
||||
|
||||
if !checkTagSpaces[key] {
|
||||
continue
|
||||
}
|
||||
|
||||
switch key {
|
||||
case "xml":
|
||||
// If the first or last character in the XML tag is a space, it is
|
||||
// suspicious.
|
||||
if strings.Trim(value, " ") != value {
|
||||
return errTagValueSpace
|
||||
}
|
||||
|
||||
// If there are multiple spaces, they are suspicious.
|
||||
if strings.Count(value, " ") > 1 {
|
||||
return errTagValueSpace
|
||||
}
|
||||
|
||||
// If there is no comma, skip the rest of the checks.
|
||||
comma := strings.IndexRune(value, ',')
|
||||
if comma < 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// If the character before a comma is a space, this is suspicious.
|
||||
if comma > 0 && value[comma-1] == ' ' {
|
||||
return errTagValueSpace
|
||||
}
|
||||
value = value[comma+1:]
|
||||
case "json":
|
||||
// JSON allows using spaces in the name, so skip it.
|
||||
comma := strings.IndexRune(value, ',')
|
||||
if comma < 0 {
|
||||
continue
|
||||
}
|
||||
value = value[comma+1:]
|
||||
}
|
||||
|
||||
if strings.IndexByte(value, ' ') >= 0 {
|
||||
return errTagValueSpace
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
187
vendor/github.com/golangci/govet/tests.go
generated
vendored
187
vendor/github.com/golangci/govet/tests.go
generated
vendored
|
@ -1,187 +0,0 @@
|
|||
// Copyright 2015 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/types"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("tests",
|
||||
"check for common mistaken usages of tests/documentation examples",
|
||||
checkTestFunctions,
|
||||
funcDecl)
|
||||
}
|
||||
|
||||
func isExampleSuffix(s string) bool {
|
||||
r, size := utf8.DecodeRuneInString(s)
|
||||
return size > 0 && unicode.IsLower(r)
|
||||
}
|
||||
|
||||
func isTestSuffix(name string) bool {
|
||||
if len(name) == 0 {
|
||||
// "Test" is ok.
|
||||
return true
|
||||
}
|
||||
r, _ := utf8.DecodeRuneInString(name)
|
||||
return !unicode.IsLower(r)
|
||||
}
|
||||
|
||||
func isTestParam(typ ast.Expr, wantType string) bool {
|
||||
ptr, ok := typ.(*ast.StarExpr)
|
||||
if !ok {
|
||||
// Not a pointer.
|
||||
return false
|
||||
}
|
||||
// No easy way of making sure it's a *testing.T or *testing.B:
|
||||
// ensure the name of the type matches.
|
||||
if name, ok := ptr.X.(*ast.Ident); ok {
|
||||
return name.Name == wantType
|
||||
}
|
||||
if sel, ok := ptr.X.(*ast.SelectorExpr); ok {
|
||||
return sel.Sel.Name == wantType
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func lookup(name string, scopes []*types.Scope) types.Object {
|
||||
for _, scope := range scopes {
|
||||
if o := scope.Lookup(name); o != nil {
|
||||
return o
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func extendedScope(f *File) []*types.Scope {
|
||||
scopes := []*types.Scope{f.pkg.typesPkg.Scope()}
|
||||
if f.basePkg != nil {
|
||||
scopes = append(scopes, f.basePkg.typesPkg.Scope())
|
||||
} else {
|
||||
// If basePkg is not specified (e.g. when checking a single file) try to
|
||||
// find it among imports.
|
||||
pkgName := f.pkg.typesPkg.Name()
|
||||
if strings.HasSuffix(pkgName, "_test") {
|
||||
basePkgName := strings.TrimSuffix(pkgName, "_test")
|
||||
for _, p := range f.pkg.typesPkg.Imports() {
|
||||
if p.Name() == basePkgName {
|
||||
scopes = append(scopes, p.Scope())
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return scopes
|
||||
}
|
||||
|
||||
func checkExample(fn *ast.FuncDecl, f *File, report reporter) {
|
||||
fnName := fn.Name.Name
|
||||
if params := fn.Type.Params; len(params.List) != 0 {
|
||||
report("%s should be niladic", fnName)
|
||||
}
|
||||
if results := fn.Type.Results; results != nil && len(results.List) != 0 {
|
||||
report("%s should return nothing", fnName)
|
||||
}
|
||||
|
||||
if filesRun && !includesNonTest {
|
||||
// The coherence checks between a test and the package it tests
|
||||
// will report false positives if no non-test files have
|
||||
// been provided.
|
||||
return
|
||||
}
|
||||
|
||||
if fnName == "Example" {
|
||||
// Nothing more to do.
|
||||
return
|
||||
}
|
||||
|
||||
var (
|
||||
exName = strings.TrimPrefix(fnName, "Example")
|
||||
elems = strings.SplitN(exName, "_", 3)
|
||||
ident = elems[0]
|
||||
obj = lookup(ident, extendedScope(f))
|
||||
)
|
||||
if ident != "" && obj == nil {
|
||||
// Check ExampleFoo and ExampleBadFoo.
|
||||
report("%s refers to unknown identifier: %s", fnName, ident)
|
||||
// Abort since obj is absent and no subsequent checks can be performed.
|
||||
return
|
||||
}
|
||||
if len(elems) < 2 {
|
||||
// Nothing more to do.
|
||||
return
|
||||
}
|
||||
|
||||
if ident == "" {
|
||||
// Check Example_suffix and Example_BadSuffix.
|
||||
if residual := strings.TrimPrefix(exName, "_"); !isExampleSuffix(residual) {
|
||||
report("%s has malformed example suffix: %s", fnName, residual)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
mmbr := elems[1]
|
||||
if !isExampleSuffix(mmbr) {
|
||||
// Check ExampleFoo_Method and ExampleFoo_BadMethod.
|
||||
if obj, _, _ := types.LookupFieldOrMethod(obj.Type(), true, obj.Pkg(), mmbr); obj == nil {
|
||||
report("%s refers to unknown field or method: %s.%s", fnName, ident, mmbr)
|
||||
}
|
||||
}
|
||||
if len(elems) == 3 && !isExampleSuffix(elems[2]) {
|
||||
// Check ExampleFoo_Method_suffix and ExampleFoo_Method_Badsuffix.
|
||||
report("%s has malformed example suffix: %s", fnName, elems[2])
|
||||
}
|
||||
}
|
||||
|
||||
func checkTest(fn *ast.FuncDecl, prefix string, report reporter) {
|
||||
// Want functions with 0 results and 1 parameter.
|
||||
if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 ||
|
||||
fn.Type.Params == nil ||
|
||||
len(fn.Type.Params.List) != 1 ||
|
||||
len(fn.Type.Params.List[0].Names) > 1 {
|
||||
return
|
||||
}
|
||||
|
||||
// The param must look like a *testing.T or *testing.B.
|
||||
if !isTestParam(fn.Type.Params.List[0].Type, prefix[:1]) {
|
||||
return
|
||||
}
|
||||
|
||||
if !isTestSuffix(fn.Name.Name[len(prefix):]) {
|
||||
report("%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix)
|
||||
}
|
||||
}
|
||||
|
||||
type reporter func(format string, args ...interface{})
|
||||
|
||||
// checkTestFunctions walks Test, Benchmark and Example functions checking
|
||||
// malformed names, wrong signatures and examples documenting nonexistent
|
||||
// identifiers.
|
||||
func checkTestFunctions(f *File, node ast.Node) {
|
||||
if !strings.HasSuffix(f.name, "_test.go") {
|
||||
return
|
||||
}
|
||||
|
||||
fn, ok := node.(*ast.FuncDecl)
|
||||
if !ok || fn.Recv != nil {
|
||||
// Ignore non-functions or functions with receivers.
|
||||
return
|
||||
}
|
||||
|
||||
report := func(format string, args ...interface{}) { f.Badf(node.Pos(), format, args...) }
|
||||
|
||||
switch {
|
||||
case strings.HasPrefix(fn.Name.Name, "Example"):
|
||||
checkExample(fn, f, report)
|
||||
case strings.HasPrefix(fn.Name.Name, "Test"):
|
||||
checkTest(fn, "Test", report)
|
||||
case strings.HasPrefix(fn.Name.Name, "Benchmark"):
|
||||
checkTest(fn, "Benchmark", report)
|
||||
}
|
||||
}
|
347
vendor/github.com/golangci/govet/types.go
generated
vendored
347
vendor/github.com/golangci/govet/types.go
generated
vendored
|
@ -1,347 +0,0 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains the pieces of the tool that use typechecking from the go/types package.
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/build"
|
||||
"go/importer"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"golang.org/x/tools/go/gcexportdata"
|
||||
"golang.org/x/tools/go/loader"
|
||||
)
|
||||
|
||||
// stdImporter is the importer we use to import packages.
|
||||
// It is shared so that all packages are imported by the same importer.
|
||||
var stdImporter types.Importer
|
||||
|
||||
var (
|
||||
errorType *types.Interface
|
||||
stringerType *types.Interface // possibly nil
|
||||
formatterType *types.Interface // possibly nil
|
||||
)
|
||||
|
||||
func newGCExportDataImporter(fset *token.FileSet) types.ImporterFrom {
|
||||
return gcexportdata.NewImporter(fset, make(map[string]*types.Package))
|
||||
}
|
||||
|
||||
func inittypes() error {
|
||||
errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
|
||||
|
||||
typ, err := importType("fmt", "Stringer")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
stringerType = typ.Underlying().(*types.Interface)
|
||||
|
||||
typ, err = importType("fmt", "Formatter")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
formatterType = typ.Underlying().(*types.Interface)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// isNamedType reports whether t is the named type path.name.
|
||||
func isNamedType(t types.Type, path, name string) bool {
|
||||
n, ok := t.(*types.Named)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
obj := n.Obj()
|
||||
return obj.Name() == name && obj.Pkg() != nil && obj.Pkg().Path() == path
|
||||
}
|
||||
|
||||
// importType returns the type denoted by the qualified identifier
|
||||
// path.name, and adds the respective package to the imports map
|
||||
// as a side effect. In case of an error, importType returns nil.
|
||||
func importType(path, name string) (types.Type, error) {
|
||||
startedAt := time.Now()
|
||||
defer func() {
|
||||
fmt.Fprintf(os.Stderr, "vet: import of type %s.%s took %s\n", path, name, time.Since(startedAt))
|
||||
}()
|
||||
|
||||
pkg, err := stdImporter.Import(path)
|
||||
if err != nil {
|
||||
// This can happen if the package at path hasn't been compiled yet.
|
||||
return nil, fmt.Errorf("import of type %s.%s failed: %v", path, name, err)
|
||||
}
|
||||
if obj, ok := pkg.Scope().Lookup(name).(*types.TypeName); ok {
|
||||
return obj.Type(), nil
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("can't import type %s.%s: invalid type name %q", path, name, name)
|
||||
}
|
||||
|
||||
func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File, pkgInfo *loader.PackageInfo) []error {
|
||||
if stdImporter == nil {
|
||||
if *source {
|
||||
stdImporter = importer.For("source", nil)
|
||||
} else {
|
||||
stdImporter = newGCExportDataImporter(fs)
|
||||
}
|
||||
if err := inittypes(); err != nil {
|
||||
return []error{fmt.Errorf("can't init std types: %s", err)}
|
||||
}
|
||||
}
|
||||
|
||||
var allErrors []error
|
||||
pkg.spans = make(map[types.Object]Span)
|
||||
if pkgInfo != nil {
|
||||
pkg.defs = pkgInfo.Defs
|
||||
pkg.uses = pkgInfo.Uses
|
||||
pkg.selectors = pkgInfo.Selections
|
||||
pkg.types = pkgInfo.Types
|
||||
pkg.typesPkg = pkgInfo.Pkg
|
||||
} else {
|
||||
pkg.defs = make(map[*ast.Ident]types.Object)
|
||||
pkg.uses = make(map[*ast.Ident]types.Object)
|
||||
pkg.selectors = make(map[*ast.SelectorExpr]*types.Selection)
|
||||
pkg.types = make(map[ast.Expr]types.TypeAndValue)
|
||||
|
||||
config := types.Config{
|
||||
// We use the same importer for all imports to ensure that
|
||||
// everybody sees identical packages for the given paths.
|
||||
Importer: stdImporter,
|
||||
// By providing a Config with our own error function, it will continue
|
||||
// past the first error. We collect them all for printing later.
|
||||
Error: func(e error) {
|
||||
allErrors = append(allErrors, e)
|
||||
},
|
||||
|
||||
Sizes: archSizes,
|
||||
}
|
||||
info := &types.Info{
|
||||
Selections: pkg.selectors,
|
||||
Types: pkg.types,
|
||||
Defs: pkg.defs,
|
||||
Uses: pkg.uses,
|
||||
}
|
||||
typesPkg, err := config.Check(pkg.path, fs, astFiles, info)
|
||||
if len(allErrors) == 0 && err != nil {
|
||||
allErrors = append(allErrors, fmt.Errorf("type checker failed: %s", err))
|
||||
}
|
||||
|
||||
pkg.typesPkg = typesPkg
|
||||
}
|
||||
|
||||
// update spans
|
||||
for id, obj := range pkg.defs {
|
||||
pkg.growSpan(id, obj)
|
||||
}
|
||||
for id, obj := range pkg.uses {
|
||||
pkg.growSpan(id, obj)
|
||||
}
|
||||
return allErrors
|
||||
}
|
||||
|
||||
// matchArgType reports an error if printf verb t is not appropriate
|
||||
// for operand arg.
|
||||
//
|
||||
// typ is used only for recursive calls; external callers must supply nil.
|
||||
//
|
||||
// (Recursion arises from the compound types {map,chan,slice} which
|
||||
// may be printed with %d etc. if that is appropriate for their element
|
||||
// types.)
|
||||
func (f *File) matchArgType(t printfArgType, typ types.Type, arg ast.Expr) bool {
|
||||
return f.matchArgTypeInternal(t, typ, arg, make(map[types.Type]bool))
|
||||
}
|
||||
|
||||
// matchArgTypeInternal is the internal version of matchArgType. It carries a map
|
||||
// remembering what types are in progress so we don't recur when faced with recursive
|
||||
// types or mutually recursive types.
|
||||
func (f *File) matchArgTypeInternal(t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool {
|
||||
// %v, %T accept any argument type.
|
||||
if t == anyType {
|
||||
return true
|
||||
}
|
||||
if typ == nil {
|
||||
// external call
|
||||
typ = f.pkg.types[arg].Type
|
||||
if typ == nil {
|
||||
return true // probably a type check problem
|
||||
}
|
||||
}
|
||||
// If the type implements fmt.Formatter, we have nothing to check.
|
||||
if f.isFormatter(typ) {
|
||||
return true
|
||||
}
|
||||
// If we can use a string, might arg (dynamically) implement the Stringer or Error interface?
|
||||
if t&argString != 0 && isConvertibleToString(typ) {
|
||||
return true
|
||||
}
|
||||
|
||||
typ = typ.Underlying()
|
||||
if inProgress[typ] {
|
||||
// We're already looking at this type. The call that started it will take care of it.
|
||||
return true
|
||||
}
|
||||
inProgress[typ] = true
|
||||
|
||||
switch typ := typ.(type) {
|
||||
case *types.Signature:
|
||||
return t&argPointer != 0
|
||||
|
||||
case *types.Map:
|
||||
// Recur: map[int]int matches %d.
|
||||
return t&argPointer != 0 ||
|
||||
(f.matchArgTypeInternal(t, typ.Key(), arg, inProgress) && f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress))
|
||||
|
||||
case *types.Chan:
|
||||
return t&argPointer != 0
|
||||
|
||||
case *types.Array:
|
||||
// Same as slice.
|
||||
if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
|
||||
return true // %s matches []byte
|
||||
}
|
||||
// Recur: []int matches %d.
|
||||
return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress)
|
||||
|
||||
case *types.Slice:
|
||||
// Same as array.
|
||||
		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
			return true // %s matches []byte
		}
		// Recur: []int matches %d. But watch out for
		// type T []T
		// If the element is a pointer type (type T[]*T), it's handled fine by the Pointer case below.
		return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress)

	case *types.Pointer:
		// Ugly, but dealing with an edge case: a known pointer to an invalid type,
		// probably something from a failed import.
		if typ.Elem().String() == "invalid type" {
			if *verbose {
				f.Warnf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", f.gofmt(arg))
			}
			return true // special case
		}
		// If it's actually a pointer with %p, it prints as one.
		if t == argPointer {
			return true
		}
		// If it's pointer to struct, that's equivalent in our analysis to whether we can print the struct.
		if str, ok := typ.Elem().Underlying().(*types.Struct); ok {
			return f.matchStructArgType(t, str, arg, inProgress)
		}
		// Check whether the rest can print pointers.
		return t&argPointer != 0

	case *types.Struct:
		return f.matchStructArgType(t, typ, arg, inProgress)

	case *types.Interface:
		// There's little we can do.
		// Whether any particular verb is valid depends on the argument.
		// The user may have reasonable prior knowledge of the contents of the interface.
		return true

	case *types.Basic:
		switch typ.Kind() {
		case types.UntypedBool,
			types.Bool:
			return t&argBool != 0

		case types.UntypedInt,
			types.Int,
			types.Int8,
			types.Int16,
			types.Int32,
			types.Int64,
			types.Uint,
			types.Uint8,
			types.Uint16,
			types.Uint32,
			types.Uint64,
			types.Uintptr:
			return t&argInt != 0

		case types.UntypedFloat,
			types.Float32,
			types.Float64:
			return t&argFloat != 0

		case types.UntypedComplex,
			types.Complex64,
			types.Complex128:
			return t&argComplex != 0

		case types.UntypedString,
			types.String:
			return t&argString != 0

		case types.UnsafePointer:
			return t&(argPointer|argInt) != 0

		case types.UntypedRune:
			return t&(argInt|argRune) != 0

		case types.UntypedNil:
			return false

		case types.Invalid:
			if *verbose {
				f.Warnf(arg.Pos(), "printf argument %v has invalid or unknown type", f.gofmt(arg))
			}
			return true // Probably a type check problem.
		}
		panic("unreachable")
	}

	return false
}

func isConvertibleToString(typ types.Type) bool {
	if bt, ok := typ.(*types.Basic); ok && bt.Kind() == types.UntypedNil {
		// We explicitly don't want untyped nil, which is
		// convertible to both of the interfaces below, as it
		// would just panic anyway.
		return false
	}
	if types.ConvertibleTo(typ, errorType) {
		return true // via .Error()
	}
	if stringerType != nil && types.ConvertibleTo(typ, stringerType) {
		return true // via .String()
	}
	return false
}

// hasBasicType reports whether x's type is a types.Basic with the given kind.
func (f *File) hasBasicType(x ast.Expr, kind types.BasicKind) bool {
	t := f.pkg.types[x].Type
	if t != nil {
		t = t.Underlying()
	}
	b, ok := t.(*types.Basic)
	return ok && b.Kind() == kind
}

// matchStructArgType reports whether all the elements of the struct match the expected
// type. For instance, with "%d" all the elements must be printable with the "%d" format.
func (f *File) matchStructArgType(t printfArgType, typ *types.Struct, arg ast.Expr, inProgress map[types.Type]bool) bool {
	for i := 0; i < typ.NumFields(); i++ {
		typf := typ.Field(i)
		if !f.matchArgTypeInternal(t, typf.Type(), arg, inProgress) {
			return false
		}
		if t&argString != 0 && !typf.Exported() && isConvertibleToString(typf.Type()) {
			// Issue #17798: unexported Stringer or error cannot be properly fomatted.
			return false
		}
	}
	return true
}

var archSizes = types.SizesFor("gc", build.Default.GOARCH)
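For context, a minimal sketch of the kind of call this type-matching logic is used to reject. This is hypothetical user code for illustration only, not part of the diff:

```go
package main

import "fmt"

func main() {
	// govet's printf check flags this call: the %d verb expects an integer
	// argument, but a string is passed, so matchArgType reports a mismatch.
	fmt.Printf("%d\n", "not a number")
}
```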
97 vendor/github.com/golangci/govet/unsafeptr.go (generated, vendored)
@@ -1,97 +0,0 @@

// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Check for invalid uintptr -> unsafe.Pointer conversions.

package govet

import (
	"go/ast"
	"go/token"
	"go/types"
)

func init() {
	register("unsafeptr",
		"check for misuse of unsafe.Pointer",
		checkUnsafePointer,
		callExpr)
}

func checkUnsafePointer(f *File, node ast.Node) {
	x := node.(*ast.CallExpr)
	if len(x.Args) != 1 {
		return
	}
	if f.hasBasicType(x.Fun, types.UnsafePointer) && f.hasBasicType(x.Args[0], types.Uintptr) && !f.isSafeUintptr(x.Args[0]) {
		f.Badf(x.Pos(), "possible misuse of unsafe.Pointer")
	}
}

// isSafeUintptr reports whether x - already known to be a uintptr -
// is safe to convert to unsafe.Pointer. It is safe if x is itself derived
// directly from an unsafe.Pointer via conversion and pointer arithmetic
// or if x is the result of reflect.Value.Pointer or reflect.Value.UnsafeAddr
// or obtained from the Data field of a *reflect.SliceHeader or *reflect.StringHeader.
func (f *File) isSafeUintptr(x ast.Expr) bool {
	switch x := x.(type) {
	case *ast.ParenExpr:
		return f.isSafeUintptr(x.X)

	case *ast.SelectorExpr:
		switch x.Sel.Name {
		case "Data":
			// reflect.SliceHeader and reflect.StringHeader are okay,
			// but only if they are pointing at a real slice or string.
			// It's not okay to do:
			//	var x SliceHeader
			//	x.Data = uintptr(unsafe.Pointer(...))
			//	... use x ...
			//	p := unsafe.Pointer(x.Data)
			// because in the middle the garbage collector doesn't
			// see x.Data as a pointer and so x.Data may be dangling
			// by the time we get to the conversion at the end.
			// For now approximate by saying that *Header is okay
			// but Header is not.
			pt, ok := f.pkg.types[x.X].Type.(*types.Pointer)
			if ok {
				t, ok := pt.Elem().(*types.Named)
				if ok && t.Obj().Pkg().Path() == "reflect" {
					switch t.Obj().Name() {
					case "StringHeader", "SliceHeader":
						return true
					}
				}
			}
		}

	case *ast.CallExpr:
		switch len(x.Args) {
		case 0:
			// maybe call to reflect.Value.Pointer or reflect.Value.UnsafeAddr.
			sel, ok := x.Fun.(*ast.SelectorExpr)
			if !ok {
				break
			}
			switch sel.Sel.Name {
			case "Pointer", "UnsafeAddr":
				t, ok := f.pkg.types[sel.X].Type.(*types.Named)
				if ok && t.Obj().Pkg().Path() == "reflect" && t.Obj().Name() == "Value" {
					return true
				}
			}

		case 1:
			// maybe conversion of uintptr to unsafe.Pointer
			return f.hasBasicType(x.Fun, types.Uintptr) && f.hasBasicType(x.Args[0], types.UnsafePointer)
		}

	case *ast.BinaryExpr:
		switch x.Op {
		case token.ADD, token.SUB, token.AND_NOT:
			return f.isSafeUintptr(x.X) && !f.isSafeUintptr(x.Y)
		}
	}
	return false
}
93 vendor/github.com/golangci/govet/unused.go (generated, vendored)
@@ -1,93 +0,0 @@

// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file defines the check for unused results of calls to certain
// pure functions.

package govet

import (
	"flag"
	"go/ast"
	"go/token"
	"go/types"
	"strings"
)

var unusedFuncsFlag = flag.String("unusedfuncs",
	"errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint,sort.Reverse",
	"comma-separated list of functions whose results must be used")

var unusedStringMethodsFlag = flag.String("unusedstringmethods",
	"Error,String",
	"comma-separated list of names of methods of type func() string whose results must be used")

func init() {
	register("unusedresult",
		"check for unused result of calls to functions in -unusedfuncs list and methods in -unusedstringmethods list",
		checkUnusedResult,
		exprStmt)
}

// func() string
var sigNoArgsStringResult = types.NewSignature(nil, nil,
	types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String])),
	false)

var unusedFuncs = make(map[string]bool)
var unusedStringMethods = make(map[string]bool)

func initUnusedFlags() {
	commaSplit := func(s string, m map[string]bool) {
		if s != "" {
			for _, name := range strings.Split(s, ",") {
				if len(name) == 0 {
					flag.Usage()
				}
				m[name] = true
			}
		}
	}
	commaSplit(*unusedFuncsFlag, unusedFuncs)
	commaSplit(*unusedStringMethodsFlag, unusedStringMethods)
}

func checkUnusedResult(f *File, n ast.Node) {
	call, ok := unparen(n.(*ast.ExprStmt).X).(*ast.CallExpr)
	if !ok {
		return // not a call statement
	}
	fun := unparen(call.Fun)

	if f.pkg.types[fun].IsType() {
		return // a conversion, not a call
	}

	selector, ok := fun.(*ast.SelectorExpr)
	if !ok {
		return // neither a method call nor a qualified ident
	}

	sel, ok := f.pkg.selectors[selector]
	if ok && sel.Kind() == types.MethodVal {
		// method (e.g. foo.String())
		obj := sel.Obj().(*types.Func)
		sig := sel.Type().(*types.Signature)
		if types.Identical(sig, sigNoArgsStringResult) {
			if unusedStringMethods[obj.Name()] {
				f.Badf(call.Lparen, "result of (%s).%s call not used",
					sig.Recv().Type(), obj.Name())
			}
		}
	} else if !ok {
		// package-qualified function (e.g. fmt.Errorf)
		obj := f.pkg.uses[selector.Sel]
		if obj, ok := obj.(*types.Func); ok {
			qname := obj.Pkg().Path() + "." + obj.Name()
			if unusedFuncs[qname] {
				f.Badf(call.Lparen, "result of %v call not used", qname)
			}
		}
	}
}
193 vendor/golang.org/x/tools/go/analysis/analysis.go (generated, vendored, new file)
@@ -0,0 +1,193 @@

package analysis

import (
	"flag"
	"fmt"
	"go/ast"
	"go/token"
	"go/types"
	"reflect"
)

// An Analyzer describes an analysis function and its options.
type Analyzer struct {
	// The Name of the analyzer must be a valid Go identifier
	// as it may appear in command-line flags, URLs, and so on.
	Name string

	// Doc is the documentation for the analyzer.
	// The part before the first "\n\n" is the title
	// (no capital or period, max ~60 letters).
	Doc string

	// Flags defines any flags accepted by the analyzer.
	// The manner in which these flags are exposed to the user
	// depends on the driver which runs the analyzer.
	Flags flag.FlagSet

	// Run applies the analyzer to a package.
	// It returns an error if the analyzer failed.
	//
	// On success, the Run function may return a result
	// computed by the Analyzer; its type must match ResultType.
	// The driver makes this result available as an input to
	// another Analyzer that depends directly on this one (see
	// Requires) when it analyzes the same package.
	//
	// To pass analysis results between packages (and thus
	// potentially between address spaces), use Facts, which are
	// serializable.
	Run func(*Pass) (interface{}, error)

	// RunDespiteErrors allows the driver to invoke
	// the Run method of this analyzer even on a
	// package that contains parse or type errors.
	RunDespiteErrors bool

	// Requires is a set of analyzers that must run successfully
	// before this one on a given package. This analyzer may inspect
	// the outputs produced by each analyzer in Requires.
	// The graph over analyzers implied by Requires edges must be acyclic.
	//
	// Requires establishes a "horizontal" dependency between
	// analysis passes (different analyzers, same package).
	Requires []*Analyzer

	// ResultType is the type of the optional result of the Run function.
	ResultType reflect.Type

	// FactTypes indicates that this analyzer imports and exports
	// Facts of the specified concrete types.
	// An analyzer that uses facts may assume that its import
	// dependencies have been similarly analyzed before it runs.
	// Facts must be pointers.
	//
	// FactTypes establishes a "vertical" dependency between
	// analysis passes (same analyzer, different packages).
	FactTypes []Fact
}

func (a *Analyzer) String() string { return a.Name }

// A Pass provides information to the Run function that
// applies a specific analyzer to a single Go package.
//
// It forms the interface between the analysis logic and the driver
// program, and has both input and an output components.
//
// As in a compiler, one pass may depend on the result computed by another.
//
// The Run function should not call any of the Pass functions concurrently.
type Pass struct {
	Analyzer *Analyzer // the identity of the current analyzer

	// syntax and type information
	Fset       *token.FileSet // file position information
	Files      []*ast.File    // the abstract syntax tree of each file
	OtherFiles []string       // names of non-Go files of this package
	Pkg        *types.Package // type information about the package
	TypesInfo  *types.Info    // type information about the syntax trees
	TypesSizes types.Sizes    // function for computing sizes of types

	// Report reports a Diagnostic, a finding about a specific location
	// in the analyzed source code such as a potential mistake.
	// It may be called by the Run function.
	Report func(Diagnostic)

	// ResultOf provides the inputs to this analysis pass, which are
	// the corresponding results of its prerequisite analyzers.
	// The map keys are the elements of Analysis.Required,
	// and the type of each corresponding value is the required
	// analysis's ResultType.
	ResultOf map[*Analyzer]interface{}

	// -- facts --

	// ImportObjectFact retrieves a fact associated with obj.
	// Given a value ptr of type *T, where *T satisfies Fact,
	// ImportObjectFact copies the value to *ptr.
	//
	// ImportObjectFact panics if called after the pass is complete.
	// ImportObjectFact is not concurrency-safe.
	ImportObjectFact func(obj types.Object, fact Fact) bool

	// ImportPackageFact retrieves a fact associated with package pkg,
	// which must be this package or one of its dependencies.
	// See comments for ImportObjectFact.
	ImportPackageFact func(pkg *types.Package, fact Fact) bool

	// ExportObjectFact associates a fact of type *T with the obj,
	// replacing any previous fact of that type.
	//
	// ExportObjectFact panics if it is called after the pass is
	// complete, or if obj does not belong to the package being analyzed.
	// ExportObjectFact is not concurrency-safe.
	ExportObjectFact func(obj types.Object, fact Fact)

	// ExportPackageFact associates a fact with the current package.
	// See comments for ExportObjectFact.
	ExportPackageFact func(fact Fact)

	/* Further fields may be added in future. */
	// For example, suggested or applied refactorings.
}

// Reportf is a helper function that reports a Diagnostic using the
// specified position and formatted error message.
func (pass *Pass) Reportf(pos token.Pos, format string, args ...interface{}) {
	msg := fmt.Sprintf(format, args...)
	pass.Report(Diagnostic{Pos: pos, Message: msg})
}

func (pass *Pass) String() string {
	return fmt.Sprintf("%s@%s", pass.Analyzer.Name, pass.Pkg.Path())
}

// A Fact is an intermediate fact produced during analysis.
//
// Each fact is associated with a named declaration (a types.Object) or
// with a package as a whole. A single object or package may have
// multiple associated facts, but only one of any particular fact type.
//
// A Fact represents a predicate such as "never returns", but does not
// represent the subject of the predicate such as "function F" or "package P".
//
// Facts may be produced in one analysis pass and consumed by another
// analysis pass even if these are in different address spaces.
// If package P imports Q, all facts about Q produced during
// analysis of that package will be available during later analysis of P.
// Facts are analogous to type export data in a build system:
// just as export data enables separate compilation of several passes,
// facts enable "separate analysis".
//
// Each pass (a, p) starts with the set of facts produced by the
// same analyzer a applied to the packages directly imported by p.
// The analysis may add facts to the set, and they may be exported in turn.
// An analysis's Run function may retrieve facts by calling
// Pass.Import{Object,Package}Fact and update them using
// Pass.Export{Object,Package}Fact.
//
// A fact is logically private to its Analysis. To pass values
// between different analyzers, use the results mechanism;
// see Analyzer.Requires, Analyzer.ResultType, and Pass.ResultOf.
//
// A Fact type must be a pointer.
// Facts are encoded and decoded using encoding/gob.
// A Fact may implement the GobEncoder/GobDecoder interfaces
// to customize its encoding. Fact encoding should not fail.
//
// A Fact should not be modified once exported.
type Fact interface {
	AFact() // dummy method to avoid type errors
}

// A Diagnostic is a message associated with a source location.
//
// An Analyzer may return a variety of diagnostics; the optional Category,
// which should be a constant, may be used to classify them.
// It is primarily intended to make it easy to look up documentation.
type Diagnostic struct {
	Pos      token.Pos
	Category string // optional
	Message  string
}
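To make the API defined above concrete, here is a minimal sketch of an Analyzer that uses only the fields and helpers declared in analysis.go. The package and analyzer names are invented for illustration; this code is not part of the vendored tree:

```go
// Package callcount is a hypothetical example analyzer.
package callcount

import (
	"go/ast"

	"golang.org/x/tools/go/analysis"
)

var Analyzer = &analysis.Analyzer{
	Name: "callcount",
	Doc:  "report every call expression (toy example)",
	Run:  run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	// Walk the syntax trees supplied by the driver and report a
	// diagnostic at every call site via Pass.Reportf.
	for _, f := range pass.Files {
		ast.Inspect(f, func(n ast.Node) bool {
			if call, ok := n.(*ast.CallExpr); ok {
				pass.Reportf(call.Lparen, "call expression found")
			}
			return true
		})
	}
	return nil, nil
}
```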
336 vendor/golang.org/x/tools/go/analysis/doc.go (generated, vendored, new file)
@@ -0,0 +1,336 @@

/*

The analysis package defines the interface between a modular static
analysis and an analysis driver program.

Background

A static analysis is a function that inspects a package of Go code and
reports a set of diagnostics (typically mistakes in the code), and
perhaps produces other results as well, such as suggested refactorings
or other facts. An analysis that reports mistakes is informally called a
"checker". For example, the printf checker reports mistakes in
fmt.Printf format strings.

A "modular" analysis is one that inspects one package at a time but can
save information from a lower-level package and use it when inspecting a
higher-level package, analogous to separate compilation in a toolchain.
The printf checker is modular: when it discovers that a function such as
log.Fatalf delegates to fmt.Printf, it records this fact, and checks
calls to that function too, including calls made from another package.

By implementing a common interface, checkers from a variety of sources
can be easily selected, incorporated, and reused in a wide range of
driver programs including command-line tools (such as vet), text editors and
IDEs, build and test systems (such as go build, Bazel, or Buck), test
frameworks, code review tools, code-base indexers (such as SourceGraph),
documentation viewers (such as godoc), batch pipelines for large code
bases, and so on.


Analyzer

The primary type in the API is Analyzer. An Analyzer statically
describes an analysis function: its name, documentation, flags,
relationship to other analyzers, and of course, its logic.

To define an analysis, a user declares a (logically constant) variable
of type Analyzer. Here is a typical example from one of the analyzers in
the go/analysis/passes/ subdirectory:

	package unusedresult

	var Analyzer = &analysis.Analyzer{
		Name: "unusedresult",
		Doc:  "check for unused results of calls to some functions",
		Run:  run,
		...
	}

	func run(pass *analysis.Pass) (interface{}, error) {
		...
	}


An analysis driver is a program such as vet that runs a set of
analyses and prints the diagnostics that they report.
The driver program must import the list of Analyzers it needs.
Typically each Analyzer resides in a separate package.
To add a new Analyzer to an existing driver, add another item to the list:

	import ( "unusedresult"; "nilness"; "printf" )

	var analyses = []*analysis.Analyzer{
		unusedresult.Analyzer,
		nilness.Analyzer,
		printf.Analyzer,
	}

A driver may use the name, flags, and documentation to provide on-line
help that describes the analyses its performs.
The doc comment contains a brief one-line summary,
optionally followed by paragraphs of explanation.
The vet command, shown below, is an example of a driver that runs
multiple analyzers. It is based on the multichecker package
(see the "Standalone commands" section for details).

	$ go build golang.org/x/tools/go/analysis/cmd/vet
	$ ./vet help
	vet is a tool for static analysis of Go programs.

	Usage: vet [-flag] [package]

	Registered analyzers:

	    asmdecl      report mismatches between assembly files and Go declarations
	    assign       check for useless assignments
	    atomic       check for common mistakes using the sync/atomic package
	    ...
	    unusedresult check for unused results of calls to some functions

	$ ./vet help unusedresult
	unusedresult: check for unused results of calls to some functions

	Analyzer flags:

	  -unusedresult.funcs value
	        comma-separated list of functions whose results must be used (default Error,String)
	  -unusedresult.stringmethods value
	        comma-separated list of names of methods of type func() string whose results must be used

	Some functions like fmt.Errorf return a result and have no side effects,
	so it is always a mistake to discard the result. This analyzer reports
	calls to certain functions in which the result of the call is ignored.

	The set of functions may be controlled using flags.

The Analyzer type has more fields besides those shown above:

	type Analyzer struct {
		Name             string
		Doc              string
		Flags            flag.FlagSet
		Run              func(*Pass) (interface{}, error)
		RunDespiteErrors bool
		ResultType       reflect.Type
		Requires         []*Analyzer
		FactTypes        []Fact
	}

The Flags field declares a set of named (global) flag variables that
control analysis behavior. Unlike vet, analysis flags are not declared
directly in the command line FlagSet; it is up to the driver to set the
flag variables. A driver for a single analysis, a, might expose its flag
f directly on the command line as -f, whereas a driver for multiple
analyses might prefix the flag name by the analysis name (-a.f) to avoid
ambiguity. An IDE might expose the flags through a graphical interface,
and a batch pipeline might configure them from a config file.
See the "findcall" analyzer for an example of flags in action.

The RunDespiteErrors flag indicates whether the analysis is equipped to
handle ill-typed code. If not, the driver will skip the analysis if
there were parse or type errors.
The optional ResultType field specifies the type of the result value
computed by this analysis and made available to other analyses.
The Requires field specifies a list of analyses upon which
this one depends and whose results it may access, and it constrains the
order in which a driver may run analyses.
The FactTypes field is discussed in the section on Modularity.
The analysis package provides a Validate function to perform basic
sanity checks on an Analyzer, such as that its Requires graph is
acyclic, its fact and result types are unique, and so on.

Finally, the Run field contains a function to be called by the driver to
execute the analysis on a single package. The driver passes it an
instance of the Pass type.


Pass

A Pass describes a single unit of work: the application of a particular
Analyzer to a particular package of Go code.
The Pass provides information to the Analyzer's Run function about the
package being analyzed, and provides operations to the Run function for
reporting diagnostics and other information back to the driver.

	type Pass struct {
		Fset       *token.FileSet
		Files      []*ast.File
		OtherFiles []string
		Pkg        *types.Package
		TypesInfo  *types.Info
		ResultOf   map[*Analyzer]interface{}
		Report     func(Diagnostic)
		...
	}

The Fset, Files, Pkg, and TypesInfo fields provide the syntax trees,
type information, and source positions for a single package of Go code.

The OtherFiles field provides the names, but not the contents, of non-Go
files such as assembly that are part of this package. See the "asmdecl"
or "buildtags" analyzers for examples of loading non-Go files and report
diagnostics against them.

The ResultOf field provides the results computed by the analyzers
required by this one, as expressed in its Analyzer.Requires field. The
driver runs the required analyzers first and makes their results
available in this map. Each Analyzer must return a value of the type
described in its Analyzer.ResultType field.
For example, the "ctrlflow" analyzer returns a *ctrlflow.CFGs, which
provides a control-flow graph for each function in the package (see
golang.org/x/tools/go/cfg); the "inspect" analyzer returns a value that
enables other Analyzers to traverse the syntax trees of the package more
efficiently; and the "buildssa" analyzer constructs an SSA-form
intermediate representation.
Each of these Analyzers extends the capabilities of later Analyzers
without adding a dependency to the core API, so an analysis tool pays
only for the extensions it needs.

The Report function emits a diagnostic, a message associated with a
source position. For most analyses, diagnostics are their primary
result.
For convenience, Pass provides a helper method, Reportf, to report a new
diagnostic by formatting a string.
Diagnostic is defined as:

	type Diagnostic struct {
		Pos      token.Pos
		Category string // optional
		Message  string
	}

The optional Category field is a short identifier that classifies the
kind of message when an analysis produces several kinds of diagnostic.

Most Analyzers inspect typed Go syntax trees, but a few, such as asmdecl
and buildtag, inspect the raw text of Go source files or even non-Go
files such as assembly. To report a diagnostic against a line of a
raw text file, use the following sequence:

	content, err := ioutil.ReadFile(filename)
	if err != nil { ... }
	tf := fset.AddFile(filename, -1, len(content))
	tf.SetLinesForContent(content)
	...
	pass.Reportf(tf.LineStart(line), "oops")


Modular analysis with Facts

To improve efficiency and scalability, large programs are routinely
built using separate compilation: units of the program are compiled
separately, and recompiled only when one of their dependencies changes;
independent modules may be compiled in parallel. The same technique may
be applied to static analyses, for the same benefits. Such analyses are
described as "modular".

A compiler’s type checker is an example of a modular static analysis.
Many other checkers we would like to apply to Go programs can be
understood as alternative or non-standard type systems. For example,
vet's printf checker infers whether a function has the "printf wrapper"
type, and it applies stricter checks to calls of such functions. In
addition, it records which functions are printf wrappers for use by
later analysis units to identify other printf wrappers by induction.
A result such as “f is a printf wrapper” that is not interesting by
itself but serves as a stepping stone to an interesting result (such as
a diagnostic) is called a "fact".

The analysis API allows an analysis to define new types of facts, to
associate facts of these types with objects (named entities) declared
within the current package, or with the package as a whole, and to query
for an existing fact of a given type associated with an object or
package.

An Analyzer that uses facts must declare their types:

	var Analyzer = &analysis.Analyzer{
		Name:      "printf",
		FactTypes: []analysis.Fact{new(isWrapper)},
		...
	}

	type isWrapper struct{} // => *types.Func f “is a printf wrapper”

A driver program ensures that facts for a pass’s dependencies are
generated before analyzing the pass and are responsible for propagating
facts between from one pass to another, possibly across address spaces.
Consequently, Facts must be serializable. The API requires that drivers
use the gob encoding, an efficient, robust, self-describing binary
protocol. A fact type may implement the GobEncoder/GobDecoder interfaces
if the default encoding is unsuitable. Facts should be stateless.

The Pass type has functions to import and export facts,
associated either with an object or with a package:

	type Pass struct {
		...
		ExportObjectFact func(types.Object, Fact)
		ImportObjectFact func(types.Object, Fact) bool

		ExportPackageFact func(fact Fact)
		ImportPackageFact func(*types.Package, Fact) bool
	}

An Analyzer may only export facts associated with the current package or
its objects, though it may import facts from any package or object that
is an import dependency of the current package.

Conceptually, ExportObjectFact(obj, fact) inserts fact into a hidden map keyed by
the pair (obj, TypeOf(fact)), and the ImportObjectFact function
retrieves the entry from this map and copies its value into the variable
pointed to by fact. This scheme assumes that the concrete type of fact
is a pointer; this assumption is checked by the Validate function.
See the "printf" analyzer for an example of object facts in action.

Some driver implementations (such as those based on Bazel and Blaze) do
not currently apply analyzers to packages of the standard library.
Therefore, for best results, analyzer authors should not rely on
analysis facts being available for standard packages.
For example, although the printf checker is capable of deducing during
analysis of the log package that log.Printf is a printf-wrapper,
this fact is built in to the analyzer so that it correctly checks
calls to log.Printf even when run in a driver that does not apply
it to standard packages. We plan to remove this limitation in future.


Testing an Analyzer

The analysistest subpackage provides utilities for testing an Analyzer.
In a few lines of code, it is possible to run an analyzer on a package
of testdata files and check that it reported all the expected
diagnostics and facts (and no more). Expectations are expressed using
"// want ..." comments in the input code.


Standalone commands

Analyzers are provided in the form of packages that a driver program is
expected to import. The vet command imports a set of several analyzers,
but users may wish to define their own analysis commands that perform
additional checks. To simplify the task of creating an analysis command,
either for a single analyzer or for a whole suite, we provide the
singlechecker and multichecker subpackages.

The singlechecker package provides the main function for a command that
runs one analyzer. By convention, each analyzer such as
go/passes/findcall should be accompanied by a singlechecker-based
command such as go/analysis/passes/findcall/cmd/findcall, defined in its
entirety as:

	package main

	import (
		"golang.org/x/tools/go/analysis/passes/findcall"
		"golang.org/x/tools/go/analysis/singlechecker"
	)

	func main() { singlechecker.Main(findcall.Analyzer) }

A tool that provides multiple analyzers can use multichecker in a
similar way, giving it the list of Analyzers.


*/
package analysis
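The "Standalone commands" section above mentions multichecker for bundling several analyzers into one command. A minimal sketch of such a driver, using analyzers that appear elsewhere in this diff (the bundle chosen here is illustrative, not part of the vendored tree):

```go
// A hypothetical driver command that runs several analyzers at once.
package main

import (
	"golang.org/x/tools/go/analysis/multichecker"
	"golang.org/x/tools/go/analysis/passes/assign"
	"golang.org/x/tools/go/analysis/passes/atomic"
	"golang.org/x/tools/go/analysis/passes/unusedresult"
)

func main() {
	// multichecker.Main wires up flags, package loading, and reporting
	// for the whole list of analyzers.
	multichecker.Main(
		assign.Analyzer,
		atomic.Analyzer,
		unusedresult.Analyzer,
	)
}
```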
|
@ -2,9 +2,9 @@
|
|||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Identify mismatches between assembly files and Go func declarations.
|
||||
|
||||
package govet
|
||||
// Package asmdecl defines an Analyzer that reports mismatches between
|
||||
// assembly files and Go declarations.
|
||||
package asmdecl
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
|
@ -13,11 +13,21 @@ import (
|
|||
"go/build"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"log"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
|
||||
)
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "asmdecl",
|
||||
Doc: "report mismatches between assembly files and Go declarations",
|
||||
Run: run,
|
||||
}
|
||||
|
||||
// 'kind' is a kind of assembly variable.
|
||||
// The kinds 1, 2, 4, 8 stand for values of that size.
|
||||
type asmKind int
|
||||
|
@ -77,6 +87,7 @@ var (
|
|||
asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true}
|
||||
asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true}
|
||||
asmArchS390X = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true}
|
||||
asmArchWasm = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false}
|
||||
|
||||
arches = []*asmArch{
|
||||
&asmArch386,
|
||||
|
@ -91,6 +102,7 @@ var (
|
|||
&asmArchPpc64,
|
||||
&asmArchPpc64LE,
|
||||
&asmArchS390X,
|
||||
&asmArchWasm,
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -98,7 +110,14 @@ func init() {
|
|||
for _, arch := range arches {
|
||||
arch.sizes = types.SizesFor("gc", arch.name)
|
||||
if arch.sizes == nil {
|
||||
panic("missing SizesFor for gc/" + arch.name)
|
||||
// TODO(adonovan): fix: now that asmdecl is not in the standard
|
||||
// library we cannot assume types.SizesFor is consistent with arches.
|
||||
// For now, assume 64-bit norms and print a warning.
|
||||
// But this warning should really be deferred until we attempt to use
|
||||
// arch, which is very unlikely. Better would be
|
||||
// to defer size computation until we have Pass.TypesSizes.
|
||||
arch.sizes = types.SizesFor("gc", "amd64")
|
||||
log.Printf("unknown architecture %s", arch.name)
|
||||
}
|
||||
arch.intSize = int(arch.sizes.Sizeof(types.Typ[types.Int]))
|
||||
arch.ptrSize = int(arch.sizes.Sizeof(types.Typ[types.UnsafePointer]))
|
||||
|
@ -118,48 +137,48 @@ var (
|
|||
ppc64Suff = re(`([BHWD])(ZU|Z|U|BR)?$`)
|
||||
)
|
||||
|
||||
func asmCheck(pkg *Package) {
|
||||
if !vet("asmdecl") {
|
||||
return
|
||||
}
|
||||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
// No work if no assembly files.
|
||||
if !pkg.hasFileWithSuffix(".s") {
|
||||
return
|
||||
var sfiles []string
|
||||
for _, fname := range pass.OtherFiles {
|
||||
if strings.HasSuffix(fname, ".s") {
|
||||
sfiles = append(sfiles, fname)
|
||||
}
|
||||
}
|
||||
if sfiles == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// Gather declarations. knownFunc[name][arch] is func description.
|
||||
knownFunc := make(map[string]map[string]*asmFunc)
|
||||
|
||||
for _, f := range pkg.files {
|
||||
if f.file != nil {
|
||||
for _, decl := range f.file.Decls {
|
||||
if decl, ok := decl.(*ast.FuncDecl); ok && decl.Body == nil {
|
||||
knownFunc[decl.Name.Name] = f.asmParseDecl(decl)
|
||||
}
|
||||
for _, f := range pass.Files {
|
||||
for _, decl := range f.Decls {
|
||||
if decl, ok := decl.(*ast.FuncDecl); ok && decl.Body == nil {
|
||||
knownFunc[decl.Name.Name] = asmParseDecl(pass, decl)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Files:
|
||||
for _, f := range pkg.files {
|
||||
if !strings.HasSuffix(f.name, ".s") {
|
||||
continue
|
||||
for _, fname := range sfiles {
|
||||
content, tf, err := analysisutil.ReadFile(pass.Fset, fname)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
Println("Checking file", f.name)
|
||||
|
||||
// Determine architecture from file name if possible.
|
||||
var arch string
|
||||
var archDef *asmArch
|
||||
for _, a := range arches {
|
||||
if strings.HasSuffix(f.name, "_"+a.name+".s") {
|
||||
if strings.HasSuffix(fname, "_"+a.name+".s") {
|
||||
arch = a.name
|
||||
archDef = a
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
lines := strings.SplitAfter(string(f.content), "\n")
|
||||
lines := strings.SplitAfter(string(content), "\n")
|
||||
var (
|
||||
fn *asmFunc
|
||||
fnName string
|
||||
|
@ -173,7 +192,7 @@ Files:
|
|||
if fn != nil && fn.vars["ret"] != nil && !haveRetArg && len(retLine) > 0 {
|
||||
v := fn.vars["ret"]
|
||||
for _, line := range retLine {
|
||||
f.Badf(token.NoPos, "%s:%d: [%s] %s: RET without writing to %d-byte ret+%d(FP)", f.name, line, arch, fnName, v.size, v.off)
|
||||
pass.Reportf(analysisutil.LineStart(tf, line), "[%s] %s: RET without writing to %d-byte ret+%d(FP)", arch, fnName, v.size, v.off)
|
||||
}
|
||||
}
|
||||
retLine = nil
|
||||
|
@ -182,7 +201,7 @@ Files:
|
|||
lineno++
|
||||
|
||||
badf := func(format string, args ...interface{}) {
|
||||
f.Badf(token.NoPos, "%s:%d: [%s] %s: %s", f.name, lineno, arch, fnName, fmt.Sprintf(format, args...))
|
||||
pass.Reportf(analysisutil.LineStart(tf, lineno), "[%s] %s: %s", arch, fnName, fmt.Sprintf(format, args...))
|
||||
}
|
||||
|
||||
if arch == "" {
|
||||
|
@ -225,16 +244,17 @@ Files:
|
|||
}
|
||||
}
|
||||
if arch == "" {
|
||||
f.Warnf(token.NoPos, "%s: cannot determine architecture for assembly file", f.name)
|
||||
log.Printf("%s: cannot determine architecture for assembly file", fname)
|
||||
continue Files
|
||||
}
|
||||
}
|
||||
fnName = m[2]
|
||||
if pkgName := strings.TrimSpace(m[1]); pkgName != "" {
|
||||
pathParts := strings.Split(pkgName, "∕")
|
||||
pkgName = pathParts[len(pathParts)-1]
|
||||
if pkgName != f.pkg.path {
|
||||
f.Warnf(token.NoPos, "%s:%d: [%s] cannot check cross-package assembly function: %s is in package %s", f.name, lineno, arch, fnName, pkgName)
|
||||
if pkgPath := strings.TrimSpace(m[1]); pkgPath != "" {
|
||||
// The assembler uses Unicode division slash within
|
||||
// identifiers to represent the directory separator.
|
||||
pkgPath = strings.Replace(pkgPath, "∕", "/", -1)
|
||||
if pkgPath != pass.Pkg.Path() {
|
||||
log.Printf("%s:%d: [%s] cannot check cross-package assembly function: %s is in package %s", fname, lineno, arch, fnName, pkgPath)
|
||||
fn = nil
|
||||
fnName = ""
|
||||
continue
|
||||
|
@ -356,6 +376,7 @@ Files:
|
|||
}
|
||||
flushRet()
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func asmKindForType(t types.Type, size int) asmKind {
|
||||
|
@ -470,7 +491,7 @@ func appendComponentsRecursive(arch *asmArch, t types.Type, cc []component, suff
|
|||
offsets := arch.sizes.Offsetsof(fields)
|
||||
elemoff := int(offsets[1])
|
||||
for i := 0; i < int(tu.Len()); i++ {
|
||||
cc = appendComponentsRecursive(arch, elem, cc, suffix+"_"+strconv.Itoa(i), i*elemoff)
|
||||
cc = appendComponentsRecursive(arch, elem, cc, suffix+"_"+strconv.Itoa(i), off+i*elemoff)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -478,7 +499,7 @@ func appendComponentsRecursive(arch *asmArch, t types.Type, cc []component, suff
|
|||
}
|
||||
|
||||
// asmParseDecl parses a function decl for expected assembly variables.
|
||||
func (f *File) asmParseDecl(decl *ast.FuncDecl) map[string]*asmFunc {
|
||||
func asmParseDecl(pass *analysis.Pass, decl *ast.FuncDecl) map[string]*asmFunc {
|
||||
var (
|
||||
arch *asmArch
|
||||
fn *asmFunc
|
||||
|
@ -487,10 +508,20 @@ func (f *File) asmParseDecl(decl *ast.FuncDecl) map[string]*asmFunc {
|
|||
|
||||
// addParams adds asmVars for each of the parameters in list.
|
||||
// isret indicates whether the list are the arguments or the return values.
|
||||
// TODO(adonovan): simplify by passing (*types.Signature).{Params,Results}
|
||||
// instead of list.
|
||||
addParams := func(list []*ast.Field, isret bool) {
|
||||
argnum := 0
|
||||
for _, fld := range list {
|
||||
t := f.pkg.types[fld.Type].Type
|
||||
t := pass.TypesInfo.Types[fld.Type].Type
|
||||
|
||||
// Work around https://golang.org/issue/28277.
|
||||
if t == nil {
|
||||
if ell, ok := fld.Type.(*ast.Ellipsis); ok {
|
||||
t = types.NewSlice(pass.TypesInfo.Types[ell.Elt].Type)
|
||||
}
|
||||
}
|
||||
|
||||
align := int(arch.sizes.Alignof(t))
|
||||
size := int(arch.sizes.Sizeof(t))
|
||||
offset += -offset & (align - 1)
|
68 vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go (generated, vendored, new file)
@@ -0,0 +1,68 @@

// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package assign defines an Analyzer that detects useless assignments.
package assign

// TODO(adonovan): check also for assignments to struct fields inside
// methods that are on T instead of *T.

import (
	"go/ast"
	"go/token"
	"reflect"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
	"golang.org/x/tools/go/ast/inspector"
)

const Doc = `check for useless assignments

This checker reports assignments of the form x = x or a[i] = a[i].
These are almost always useless, and even when they aren't they are
usually a mistake.`

var Analyzer = &analysis.Analyzer{
	Name:     "assign",
	Doc:      Doc,
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	nodeFilter := []ast.Node{
		(*ast.AssignStmt)(nil),
	}
	inspect.Preorder(nodeFilter, func(n ast.Node) {
		stmt := n.(*ast.AssignStmt)
		if stmt.Tok != token.ASSIGN {
			return // ignore :=
		}
		if len(stmt.Lhs) != len(stmt.Rhs) {
			// If LHS and RHS have different cardinality, they can't be the same.
			return
		}
		for i, lhs := range stmt.Lhs {
			rhs := stmt.Rhs[i]
			if analysisutil.HasSideEffects(pass.TypesInfo, lhs) ||
				analysisutil.HasSideEffects(pass.TypesInfo, rhs) {
				continue // expressions may not be equal
			}
			if reflect.TypeOf(lhs) != reflect.TypeOf(rhs) {
				continue // short-circuit the heavy-weight gofmt check
			}
			le := analysisutil.Format(pass.Fset, lhs)
			re := analysisutil.Format(pass.Fset, rhs)
			if le == re {
				pass.Reportf(stmt.Pos(), "self-assignment of %s to %s", re, le)
			}
		}
	})

	return nil, nil
}
96 vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go (generated, vendored, new file)
@@ -0,0 +1,96 @@

// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package atomic defines an Analyzer that checks for common mistakes
// using the sync/atomic package.
package atomic

import (
	"go/ast"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
	"golang.org/x/tools/go/ast/inspector"
)

const Doc = `check for common mistakes using the sync/atomic package

The atomic checker looks for assignment statements of the form:

	x = atomic.AddUint64(&x, 1)

which are not atomic.`

var Analyzer = &analysis.Analyzer{
	Name:             "atomic",
	Doc:              Doc,
	Requires:         []*analysis.Analyzer{inspect.Analyzer},
	RunDespiteErrors: true,
	Run:              run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	nodeFilter := []ast.Node{
		(*ast.AssignStmt)(nil),
	}
	inspect.Preorder(nodeFilter, func(node ast.Node) {
		n := node.(*ast.AssignStmt)
		if len(n.Lhs) != len(n.Rhs) {
			return
		}
		if len(n.Lhs) == 1 && n.Tok == token.DEFINE {
			return
		}

		for i, right := range n.Rhs {
			call, ok := right.(*ast.CallExpr)
			if !ok {
				continue
			}
			sel, ok := call.Fun.(*ast.SelectorExpr)
			if !ok {
				continue
			}
			pkgIdent, _ := sel.X.(*ast.Ident)
			pkgName, ok := pass.TypesInfo.Uses[pkgIdent].(*types.PkgName)
			if !ok || pkgName.Imported().Path() != "sync/atomic" {
				continue
			}

			switch sel.Sel.Name {
			case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr":
				checkAtomicAddAssignment(pass, n.Lhs[i], call)
			}
		}
	})
	return nil, nil
}

// checkAtomicAddAssignment walks the atomic.Add* method calls checking
// for assigning the return value to the same variable being used in the
// operation
func checkAtomicAddAssignment(pass *analysis.Pass, left ast.Expr, call *ast.CallExpr) {
	if len(call.Args) != 2 {
		return
	}
	arg := call.Args[0]
	broken := false

	gofmt := func(e ast.Expr) string { return analysisutil.Format(pass.Fset, e) }

	if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND {
		broken = gofmt(left) == gofmt(uarg.X)
	} else if star, ok := left.(*ast.StarExpr); ok {
		broken = gofmt(star.X) == gofmt(arg)
	}

	if broken {
		pass.Reportf(left.Pos(), "direct assignment to atomic value")
	}
}
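The Doc string above shows the pattern this analyzer reports. A small self-contained illustration of user code that would be flagged, together with the atomic form (example code only, not part of the vendored package):

```go
package main

import "sync/atomic"

func main() {
	var x uint64

	// Flagged by the atomic checker: the result of AddUint64 is written
	// back to x with a plain assignment ("direct assignment to atomic value").
	x = atomic.AddUint64(&x, 1)

	// Correct: AddUint64 already updates x atomically in place.
	atomic.AddUint64(&x, 1)
}
```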
126 vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go (generated, vendored, new file)
@@ -0,0 +1,126 @@

// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package atomicalign defines an Analyzer that checks for non-64-bit-aligned
// arguments to sync/atomic functions. On non-32-bit platforms, those functions
// panic if their argument variables are not 64-bit aligned. It is therefore
// the caller's responsibility to arrange for 64-bit alignment of such variables.
// See https://golang.org/pkg/sync/atomic/#pkg-note-BUG
package atomicalign

import (
	"go/ast"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/ast/inspector"
)

var Analyzer = &analysis.Analyzer{
	Name:     "atomicalign",
	Doc:      "check for non-64-bits-aligned arguments to sync/atomic functions",
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	if 8*pass.TypesSizes.Sizeof(types.Typ[types.Uintptr]) == 64 {
		return nil, nil // 64-bit platform
	}
	if imports(pass.Pkg, "sync/atomic") == nil {
		return nil, nil // doesn't directly import sync/atomic
	}

	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	nodeFilter := []ast.Node{
		(*ast.CallExpr)(nil),
	}

	inspect.Preorder(nodeFilter, func(node ast.Node) {
		call := node.(*ast.CallExpr)
		sel, ok := call.Fun.(*ast.SelectorExpr)
		if !ok {
			return
		}
		pkgIdent, ok := sel.X.(*ast.Ident)
		if !ok {
			return
		}
		pkgName, ok := pass.TypesInfo.Uses[pkgIdent].(*types.PkgName)
		if !ok || pkgName.Imported().Path() != "sync/atomic" {
			return
		}

		switch sel.Sel.Name {
		case "AddInt64", "AddUint64",
			"LoadInt64", "LoadUint64",
			"StoreInt64", "StoreUint64",
			"SwapInt64", "SwapUint64",
			"CompareAndSwapInt64", "CompareAndSwapUint64":

			// For all the listed functions, the expression to check is always the first function argument.
			check64BitAlignment(pass, sel.Sel.Name, call.Args[0])
		}
	})

	return nil, nil
}

func check64BitAlignment(pass *analysis.Pass, funcName string, arg ast.Expr) {
	// Checks the argument is made of the address operator (&) applied to
	// to a struct field (as opposed to a variable as the first word of
	// uint64 and int64 variables can be relied upon to be 64-bit aligned.
	unary, ok := arg.(*ast.UnaryExpr)
	if !ok || unary.Op != token.AND {
		return
	}

	// Retrieve the types.Struct in order to get the offset of the
	// atomically accessed field.
	sel, ok := unary.X.(*ast.SelectorExpr)
	if !ok {
		return
	}
	tvar, ok := pass.TypesInfo.Selections[sel].Obj().(*types.Var)
	if !ok || !tvar.IsField() {
		return
	}

	stype, ok := pass.TypesInfo.Types[sel.X].Type.Underlying().(*types.Struct)
	if !ok {
		return
	}

	var offset int64
	var fields []*types.Var
	for i := 0; i < stype.NumFields(); i++ {
		f := stype.Field(i)
		fields = append(fields, f)
		if f == tvar {
			// We're done, this is the field we were looking for,
			// no need to fill the fields slice further.
			offset = pass.TypesSizes.Offsetsof(fields)[i]
			break
		}
	}
	if offset&7 == 0 {
		return // 64-bit aligned
	}

	pass.Reportf(arg.Pos(), "address of non 64-bit aligned field .%s passed to atomic.%s", tvar.Name(), funcName)
}

// imports reports whether pkg has path among its direct imports.
// It returns the imported package if so, or nil if not.
// copied from passes/cgocall.
func imports(pkg *types.Package, path string) *types.Package {
	for _, imp := range pkg.Imports() {
		if imp.Path() == path {
			return imp
		}
	}
	return nil
}
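To show what the offset check above catches, here is a hypothetical struct where a 64-bit field follows a smaller one. Assuming the usual gc field layout on 32-bit targets (an assumption for illustration; see the sync/atomic bug note linked in the package comment), the field lands at offset 4 and the call below would be reported:

```go
package main

import "sync/atomic"

type counter struct {
	ready bool   // 1 byte; on 32-bit GOARCHes the next field starts at offset 4
	n     uint64 // offset 4 is not 64-bit aligned, so atomicalign flags the call below
}

func main() {
	var c counter
	// On 32-bit builds this is the "address of non 64-bit aligned field"
	// case; on 64-bit builds the analyzer skips the package entirely.
	atomic.AddUint64(&c.n, 1)
	_ = c.ready
}
```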
|
@ -2,41 +2,57 @@
|
|||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains boolean condition tests.
|
||||
|
||||
package govet
|
||||
// Package bools defines an Analyzer that detects common mistakes
|
||||
// involving boolean operators.
|
||||
package bools
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
|
||||
"golang.org/x/tools/go/ast/inspector"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("bool",
|
||||
"check for mistakes involving boolean operators",
|
||||
checkBool,
|
||||
binaryExpr)
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "bools",
|
||||
Doc: "check for common mistakes involving boolean operators",
|
||||
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||
Run: run,
|
||||
}
|
||||
|
||||
func checkBool(f *File, n ast.Node) {
|
||||
e := n.(*ast.BinaryExpr)
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
|
||||
var op boolOp
|
||||
switch e.Op {
|
||||
case token.LOR:
|
||||
op = or
|
||||
case token.LAND:
|
||||
op = and
|
||||
default:
|
||||
return
|
||||
nodeFilter := []ast.Node{
|
||||
(*ast.BinaryExpr)(nil),
|
||||
}
|
||||
inspect.Preorder(nodeFilter, func(n ast.Node) {
|
||||
e := n.(*ast.BinaryExpr)
|
||||
|
||||
comm := op.commutativeSets(f, e)
|
||||
for _, exprs := range comm {
|
||||
op.checkRedundant(f, exprs)
|
||||
op.checkSuspect(f, exprs)
|
||||
}
|
||||
var op boolOp
|
||||
switch e.Op {
|
||||
case token.LOR:
|
||||
op = or
|
||||
case token.LAND:
|
||||
op = and
|
||||
default:
|
||||
return
|
||||
}
|
||||
|
||||
// TODO(adonovan): this reports n(n-1)/2 errors for an
|
||||
// expression e||...||e of depth n. Fix.
|
||||
// See https://golang.org/issue/28086.
|
||||
comm := op.commutativeSets(pass.TypesInfo, e)
|
||||
for _, exprs := range comm {
|
||||
op.checkRedundant(pass, exprs)
|
||||
op.checkSuspect(pass, exprs)
|
||||
}
|
||||
})
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
type boolOp struct {
|
||||
|
@ -54,14 +70,14 @@ var (
|
|||
// expressions in e that are connected by op.
|
||||
// For example, given 'a || b || f() || c || d' with the or op,
|
||||
// commutativeSets returns {{b, a}, {d, c}}.
|
||||
func (op boolOp) commutativeSets(f *File, e *ast.BinaryExpr) [][]ast.Expr {
|
||||
func (op boolOp) commutativeSets(info *types.Info, e *ast.BinaryExpr) [][]ast.Expr {
	exprs := op.split(e)

	// Partition the slice of expressions into commutative sets.
	i := 0
	var sets [][]ast.Expr
	for j := 0; j <= len(exprs); j++ {
		if j == len(exprs) || hasSideEffects(f, exprs[j]) {
		if j == len(exprs) || hasSideEffects(info, exprs[j]) {
			if i < j {
				sets = append(sets, exprs[i:j])
			}
@ -76,12 +92,12 @@ func (op boolOp) commutativeSets(f *File, e *ast.BinaryExpr) [][]ast.Expr {
// e && e
// e || e
// Exprs must contain only side effect free expressions.
func (op boolOp) checkRedundant(f *File, exprs []ast.Expr) {
func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) {
	seen := make(map[string]bool)
	for _, e := range exprs {
		efmt := f.gofmt(e)
		efmt := analysisutil.Format(pass.Fset, e)
		if seen[efmt] {
			f.Badf(e.Pos(), "redundant %s: %s %s %s", op.name, efmt, op.tok, efmt)
			pass.Reportf(e.Pos(), "redundant %s: %s %s %s", op.name, efmt, op.tok, efmt)
		} else {
			seen[efmt] = true
		}
@ -95,7 +111,7 @@ func (op boolOp) checkRedundant(f *File, exprs []ast.Expr) {
// If c1 and c2 are the same then it's redundant;
// if c1 and c2 are different then it's always true or always false.
// Exprs must contain only side effect free expressions.
func (op boolOp) checkSuspect(f *File, exprs []ast.Expr) {
func (op boolOp) checkSuspect(pass *analysis.Pass, exprs []ast.Expr) {
	// seen maps from expressions 'x' to equality expressions 'x != c'.
	seen := make(map[string]string)

@ -114,21 +130,21 @@ func (op boolOp) checkSuspect(f *File, exprs []ast.Expr) {
		// code is written.
		var x ast.Expr
		switch {
		case f.pkg.types[bin.Y].Value != nil:
		case pass.TypesInfo.Types[bin.Y].Value != nil:
			x = bin.X
		case f.pkg.types[bin.X].Value != nil:
		case pass.TypesInfo.Types[bin.X].Value != nil:
			x = bin.Y
		default:
			continue
		}

		// e is of the form 'x != c' or 'x == c'.
		xfmt := f.gofmt(x)
		efmt := f.gofmt(e)
		xfmt := analysisutil.Format(pass.Fset, x)
		efmt := analysisutil.Format(pass.Fset, e)
		if prev, found := seen[xfmt]; found {
			// checkRedundant handles the case in which efmt == prev.
			if efmt != prev {
				f.Badf(e.Pos(), "suspect %s: %s %s %s", op.name, efmt, op.tok, prev)
				pass.Reportf(e.Pos(), "suspect %s: %s %s %s", op.name, efmt, op.tok, prev)
			}
		} else {
			seen[xfmt] = efmt
@ -137,26 +153,27 @@ func (op boolOp) checkSuspect(f *File, exprs []ast.Expr) {
}

// hasSideEffects reports whether evaluation of e has side effects.
func hasSideEffects(f *File, e ast.Expr) bool {
func hasSideEffects(info *types.Info, e ast.Expr) bool {
	safe := true
	ast.Inspect(e, func(node ast.Node) bool {
		switch n := node.(type) {
		case *ast.CallExpr:
			// Don't call Type.Underlying(), since its lack
			// lets us see the NamedFuncType(x) type
			// conversion as a *types.Named.
			_, ok := f.pkg.types[n.Fun].Type.(*types.Signature)
			if ok {
				// Conservatively assume that all function and
				// method calls have side effects for
				// now. This will include func type
				// conversions, but it's ok given that
				// this is the conservative side.
			typVal := info.Types[n.Fun]
			switch {
			case typVal.IsType():
				// Type conversion, which is safe.
			case typVal.IsBuiltin():
				// Builtin func, conservatively assumed to not
				// be safe for now.
				safe = false
				return false
			default:
				// A non-builtin func or method call.
				// Conservatively assume that all of them have
				// side effects for now.
				safe = false
				return false
			}
			// It's a type conversion, which cannot
			// have side effects.
		case *ast.UnaryExpr:
			if n.Op == token.ARROW {
				safe = false
117
vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go
generated
vendored
Normal file
@ -0,0 +1,117 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package buildssa defines an Analyzer that constructs the SSA
// representation of an error-free package and returns the set of all
// functions within it. It does not report any diagnostics itself but
// may be used as an input to other analyzers.
//
// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE.
package buildssa

import (
	"go/ast"
	"go/types"
	"reflect"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/ssa"
)

var Analyzer = &analysis.Analyzer{
	Name:       "buildssa",
	Doc:        "build SSA-form IR for later passes",
	Run:        run,
	ResultType: reflect.TypeOf(new(SSA)),
}

// SSA provides SSA-form intermediate representation for all the
// non-blank source functions in the current package.
type SSA struct {
	Pkg      *ssa.Package
	SrcFuncs []*ssa.Function
}

func run(pass *analysis.Pass) (interface{}, error) {
	// Plundered from ssautil.BuildPackage.

	// We must create a new Program for each Package because the
	// analysis API provides no place to hang a Program shared by
	// all Packages. Consequently, SSA Packages and Functions do not
	// have a canonical representation across an analysis session of
	// multiple packages. This is unlikely to be a problem in
	// practice because the analysis API essentially forces all
	// packages to be analysed independently, so any given call to
	// Analysis.Run on a package will see only SSA objects belonging
	// to a single Program.

	// Some Analyzers may need GlobalDebug, in which case we'll have
	// to set it globally, but let's wait till we need it.
	mode := ssa.BuilderMode(0)

	prog := ssa.NewProgram(pass.Fset, mode)

	// Create SSA packages for all imports.
	// Order is not significant.
	created := make(map[*types.Package]bool)
	var createAll func(pkgs []*types.Package)
	createAll = func(pkgs []*types.Package) {
		for _, p := range pkgs {
			if !created[p] {
				created[p] = true
				prog.CreatePackage(p, nil, nil, true)
				createAll(p.Imports())
			}
		}
	}
	createAll(pass.Pkg.Imports())

	// Create and build the primary package.
	ssapkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false)
	ssapkg.Build()

	// Compute list of source functions, including literals,
	// in source order.
	var funcs []*ssa.Function
	for _, f := range pass.Files {
		for _, decl := range f.Decls {
			if fdecl, ok := decl.(*ast.FuncDecl); ok {

				// SSA will not build a Function
				// for a FuncDecl named blank.
				// That's arguably too strict but
				// relaxing it would break uniqueness of
				// names of package members.
				if fdecl.Name.Name == "_" {
					continue
				}

				// (init functions have distinct Func
				// objects named "init" and distinct
				// ssa.Functions named "init#1", ...)

				fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func)
				if fn == nil {
					panic(fn)
				}

				f := ssapkg.Prog.FuncValue(fn)
				if f == nil {
					panic(fn)
				}

				var addAnons func(f *ssa.Function)
				addAnons = func(f *ssa.Function) {
					funcs = append(funcs, f)
					for _, anon := range f.AnonFuncs {
						addAnons(anon)
					}
				}
				addAnons(f)
			}
		}
	}

	return &SSA{Pkg: ssapkg, SrcFuncs: funcs}, nil
}
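
buildssa only produces a result; it reports nothing itself. For orientation, here is a minimal sketch, not part of this commit (the package and analyzer names `example` and `ssaconsumer` are made up), of how a dependent analyzer would request the pass via `Requires` and read its `*buildssa.SSA` result from `pass.ResultOf`, the same pattern the passes below use for `inspect.Analyzer`:

```go
// Package example is a hypothetical consumer of the buildssa result.
package example

import (
	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/buildssa"
)

var Analyzer = &analysis.Analyzer{
	Name:     "ssaconsumer",
	Doc:      "illustrative consumer of the SSA built by the buildssa pass",
	Requires: []*analysis.Analyzer{buildssa.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	// The type assertion matches buildssa's ResultType, *buildssa.SSA.
	ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
	for _, fn := range ssainput.SrcFuncs {
		_ = fn.Blocks // walk the basic blocks of each source function
	}
	return nil, nil
}
```
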
159
vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go
generated
vendored
Normal file
|
@ -0,0 +1,159 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package buildtag defines an Analyzer that checks build tags.
|
||||
package buildtag
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
|
||||
)
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "buildtag",
|
||||
Doc: "check that +build tags are well-formed and correctly located",
|
||||
Run: runBuildTag,
|
||||
}
|
||||
|
||||
func runBuildTag(pass *analysis.Pass) (interface{}, error) {
|
||||
for _, f := range pass.Files {
|
||||
checkGoFile(pass, f)
|
||||
}
|
||||
for _, name := range pass.OtherFiles {
|
||||
if err := checkOtherFile(pass, name); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func checkGoFile(pass *analysis.Pass, f *ast.File) {
|
||||
pastCutoff := false
|
||||
for _, group := range f.Comments {
|
||||
// A +build comment is ignored after or adjoining the package declaration.
|
||||
if group.End()+1 >= f.Package {
|
||||
pastCutoff = true
|
||||
}
|
||||
|
||||
// "+build" is ignored within or after a /*...*/ comment.
|
||||
if !strings.HasPrefix(group.List[0].Text, "//") {
|
||||
pastCutoff = true
|
||||
continue
|
||||
}
|
||||
|
||||
// Check each line of a //-comment.
|
||||
for _, c := range group.List {
|
||||
if !strings.Contains(c.Text, "+build") {
|
||||
continue
|
||||
}
|
||||
if err := checkLine(c.Text, pastCutoff); err != nil {
|
||||
pass.Reportf(c.Pos(), "%s", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func checkOtherFile(pass *analysis.Pass, filename string) error {
|
||||
content, tf, err := analysisutil.ReadFile(pass.Fset, filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// We must look at the raw lines, as build tags may appear in non-Go
|
||||
// files such as assembly files.
|
||||
lines := bytes.SplitAfter(content, nl)
|
||||
|
||||
// Determine cutpoint where +build comments are no longer valid.
|
||||
// They are valid in leading // comments in the file followed by
|
||||
// a blank line.
|
||||
//
|
||||
// This must be done as a separate pass because of the
|
||||
// requirement that the comment be followed by a blank line.
|
||||
var cutoff int
|
||||
for i, line := range lines {
|
||||
line = bytes.TrimSpace(line)
|
||||
if !bytes.HasPrefix(line, slashSlash) {
|
||||
if len(line) > 0 {
|
||||
break
|
||||
}
|
||||
cutoff = i
|
||||
}
|
||||
}
|
||||
|
||||
for i, line := range lines {
|
||||
line = bytes.TrimSpace(line)
|
||||
if !bytes.HasPrefix(line, slashSlash) {
|
||||
continue
|
||||
}
|
||||
if !bytes.Contains(line, []byte("+build")) {
|
||||
continue
|
||||
}
|
||||
if err := checkLine(string(line), i >= cutoff); err != nil {
|
||||
pass.Reportf(analysisutil.LineStart(tf, i+1), "%s", err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// checkLine checks a line that starts with "//" and contains "+build".
|
||||
func checkLine(line string, pastCutoff bool) error {
|
||||
line = strings.TrimPrefix(line, "//")
|
||||
line = strings.TrimSpace(line)
|
||||
|
||||
if strings.HasPrefix(line, "+build") {
|
||||
fields := strings.Fields(line)
|
||||
if fields[0] != "+build" {
|
||||
// Comment is something like +buildasdf not +build.
|
||||
return fmt.Errorf("possible malformed +build comment")
|
||||
}
|
||||
if pastCutoff {
|
||||
return fmt.Errorf("+build comment must appear before package clause and be followed by a blank line")
|
||||
}
|
||||
if err := checkArguments(fields); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
// Comment with +build but not at beginning.
|
||||
if !pastCutoff {
|
||||
return fmt.Errorf("possible malformed +build comment")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func checkArguments(fields []string) error {
|
||||
// The original version of this checker in vet could examine
|
||||
// files with malformed build tags that would cause the file to
|
||||
// be always ignored by "go build". However, drivers for the new
|
||||
// analysis API will analyze only the files selected to form a
|
||||
// package, so these checks will never fire.
|
||||
// TODO(adonovan): rethink this.
|
||||
|
||||
for _, arg := range fields[1:] {
|
||||
for _, elem := range strings.Split(arg, ",") {
|
||||
if strings.HasPrefix(elem, "!!") {
|
||||
return fmt.Errorf("invalid double negative in build constraint: %s", arg)
|
||||
}
|
||||
elem = strings.TrimPrefix(elem, "!")
|
||||
for _, c := range elem {
|
||||
if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
|
||||
return fmt.Errorf("invalid non-alphanumeric build constraint: %s", arg)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var (
|
||||
nl = []byte("\n")
|
||||
slashSlash = []byte("//")
|
||||
)
|
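
For reference, a hedged sketch of the two malformed forms that checkLine rejects; the file below is hypothetical and compiles, and its comments only paraphrase what the analyzer would report:

```go
// +buildlinux
// The line above would be reported as a possible malformed build comment:
// there is no space between the directive and its tags.

// +build linux
package example

// The constraint directly above the package clause would be reported as well:
// it must appear earlier in the file and be followed by a blank line.
```
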
388
vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go
generated
vendored
Normal file
|
@ -0,0 +1,388 @@
|
|||
// Copyright 2015 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package cgocall defines an Analyzer that detects some violations of
|
||||
// the cgo pointer passing rules.
|
||||
package cgocall
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/format"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
|
||||
)
|
||||
|
||||
const debug = false
|
||||
|
||||
const doc = `detect some violations of the cgo pointer passing rules
|
||||
|
||||
Check for invalid cgo pointer passing.
|
||||
This looks for code that uses cgo to call C code passing values
|
||||
whose types are almost always invalid according to the cgo pointer
|
||||
sharing rules.
|
||||
Specifically, it warns about attempts to pass a Go chan, map, func,
|
||||
or slice to C, either directly, or via a pointer, array, or struct.`
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "cgocall",
|
||||
Doc: doc,
|
||||
RunDespiteErrors: true,
|
||||
Run: run,
|
||||
}
|
||||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
if imports(pass.Pkg, "runtime/cgo") == nil {
|
||||
return nil, nil // doesn't use cgo
|
||||
}
|
||||
|
||||
cgofiles, info, err := typeCheckCgoSourceFiles(pass.Fset, pass.Pkg, pass.Files, pass.TypesInfo, pass.TypesSizes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, f := range cgofiles {
|
||||
checkCgo(pass.Fset, f, info, pass.Reportf)
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(token.Pos, string, ...interface{})) {
|
||||
ast.Inspect(f, func(n ast.Node) bool {
|
||||
call, ok := n.(*ast.CallExpr)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
|
||||
// Is this a C.f() call?
|
||||
var name string
|
||||
if sel, ok := analysisutil.Unparen(call.Fun).(*ast.SelectorExpr); ok {
|
||||
if id, ok := sel.X.(*ast.Ident); ok && id.Name == "C" {
|
||||
name = sel.Sel.Name
|
||||
}
|
||||
}
|
||||
if name == "" {
|
||||
return true // not a call we need to check
|
||||
}
|
||||
|
||||
// A call to C.CBytes passes a pointer but is always safe.
|
||||
if name == "CBytes" {
|
||||
return true
|
||||
}
|
||||
|
||||
if debug {
|
||||
log.Printf("%s: call to C.%s", fset.Position(call.Lparen), name)
|
||||
}
|
||||
|
||||
for _, arg := range call.Args {
|
||||
if !typeOKForCgoCall(cgoBaseType(info, arg), make(map[types.Type]bool)) {
|
||||
reportf(arg.Pos(), "possibly passing Go type with embedded pointer to C")
|
||||
break
|
||||
}
|
||||
|
||||
// Check for passing the address of a bad type.
|
||||
if conv, ok := arg.(*ast.CallExpr); ok && len(conv.Args) == 1 &&
|
||||
isUnsafePointer(info, conv.Fun) {
|
||||
arg = conv.Args[0]
|
||||
}
|
||||
if u, ok := arg.(*ast.UnaryExpr); ok && u.Op == token.AND {
|
||||
if !typeOKForCgoCall(cgoBaseType(info, u.X), make(map[types.Type]bool)) {
|
||||
reportf(arg.Pos(), "possibly passing Go type with embedded pointer to C")
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
// typeCheckCgoSourceFiles returns type-checked syntax trees for the raw
|
||||
// cgo files of a package (those that import "C"). Such files are not
|
||||
// Go, so there may be gaps in type information around C.f references.
|
||||
//
|
||||
// This checker was initially written in vet to inspect raw cgo source
|
||||
// files using partial type information. However, Analyzers in the new
|
||||
// analysis API are presented with the type-checked, "cooked" Go ASTs
|
||||
// resulting from cgo-processing files, so we must choose between
|
||||
// working with the cooked file generated by cgo (which was tried but
|
||||
// proved fragile) or locating the raw cgo file (e.g. from //line
|
||||
// directives) and working with that, as we now do.
|
||||
//
|
||||
// Specifically, we must type-check the raw cgo source files (or at
|
||||
// least the subtrees needed for this analyzer) in an environment that
|
||||
// simulates the rest of the already type-checked package.
|
||||
//
|
||||
// For example, for each raw cgo source file in the original package,
|
||||
// such as this one:
|
||||
//
|
||||
// package p
|
||||
// import "C"
|
||||
// import "fmt"
|
||||
// type T int
|
||||
// const k = 3
|
||||
// var x, y = fmt.Println()
|
||||
// func f() { ... }
|
||||
// func g() { ... C.malloc(k) ... }
|
||||
// func (T) f(int) string { ... }
|
||||
//
|
||||
// we synthesize a new ast.File, shown below, that dot-imports the
|
||||
// original "cooked" package using a special name ("·this·"), so that all
|
||||
// references to package members resolve correctly. (References to
|
||||
// unexported names cause an "unexported" error, which we ignore.)
|
||||
//
|
||||
// To avoid shadowing names imported from the cooked package,
|
||||
// package-level declarations in the new source file are modified so
|
||||
// that they do not declare any names.
|
||||
// (The cgocall analysis is concerned with uses, not declarations.)
|
||||
// Specifically, type declarations are discarded;
|
||||
// all names in each var and const declaration are blanked out;
|
||||
// each method is turned into a regular function by turning
|
||||
// the receiver into the first parameter;
|
||||
// and all functions are renamed to "_".
|
||||
//
|
||||
// package p
|
||||
// import . "·this·" // declares T, k, x, y, f, g, T.f
|
||||
// import "C"
|
||||
// import "fmt"
|
||||
// const _ = 3
|
||||
// var _, _ = fmt.Println()
|
||||
// func _() { ... }
|
||||
// func _() { ... C.malloc(k) ... }
|
||||
// func _(T, int) string { ... }
|
||||
//
|
||||
// In this way, the raw function bodies and const/var initializer
|
||||
// expressions are preserved but refer to the "cooked" objects imported
|
||||
// from "·this·", and none of the transformed package-level declarations
|
||||
// actually declares anything. In the example above, the reference to k
|
||||
// in the argument of the call to C.malloc resolves to "·this·".k, which
|
||||
// has an accurate type.
|
||||
//
|
||||
// This approach could in principle be generalized to more complex
|
||||
// analyses on raw cgo files. One could synthesize a "C" package so that
|
||||
// C.f would resolve to "·this·"._C_func_f, for example. But we have
|
||||
// limited ourselves here to preserving function bodies and initializer
|
||||
// expressions since that is all that the cgocall analyzer needs.
|
||||
//
|
||||
func typeCheckCgoSourceFiles(fset *token.FileSet, pkg *types.Package, files []*ast.File, info *types.Info, sizes types.Sizes) ([]*ast.File, *types.Info, error) {
|
||||
const thispkg = "·this·"
|
||||
|
||||
// Which files are cgo files?
|
||||
var cgoFiles []*ast.File
|
||||
importMap := map[string]*types.Package{thispkg: pkg}
|
||||
for _, raw := range files {
|
||||
// If f is a cgo-generated file, Position reports
|
||||
// the original file, honoring //line directives.
|
||||
filename := fset.Position(raw.Pos()).Filename
|
||||
f, err := parser.ParseFile(fset, filename, nil, parser.Mode(0))
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("can't parse raw cgo file: %v", err)
|
||||
}
|
||||
found := false
|
||||
for _, spec := range f.Imports {
|
||||
if spec.Path.Value == `"C"` {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
continue // not a cgo file
|
||||
}
|
||||
|
||||
// Record the original import map.
|
||||
for _, spec := range raw.Imports {
|
||||
path, _ := strconv.Unquote(spec.Path.Value)
|
||||
importMap[path] = imported(info, spec)
|
||||
}
|
||||
|
||||
// Add special dot-import declaration:
|
||||
// import . "·this·"
|
||||
var decls []ast.Decl
|
||||
decls = append(decls, &ast.GenDecl{
|
||||
Tok: token.IMPORT,
|
||||
Specs: []ast.Spec{
|
||||
&ast.ImportSpec{
|
||||
Name: &ast.Ident{Name: "."},
|
||||
Path: &ast.BasicLit{
|
||||
Kind: token.STRING,
|
||||
Value: strconv.Quote(thispkg),
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Transform declarations from the raw cgo file.
|
||||
for _, decl := range f.Decls {
|
||||
switch decl := decl.(type) {
|
||||
case *ast.GenDecl:
|
||||
switch decl.Tok {
|
||||
case token.TYPE:
|
||||
// Discard type declarations.
|
||||
continue
|
||||
case token.IMPORT:
|
||||
// Keep imports.
|
||||
case token.VAR, token.CONST:
|
||||
// Blank the declared var/const names.
|
||||
for _, spec := range decl.Specs {
|
||||
spec := spec.(*ast.ValueSpec)
|
||||
for i := range spec.Names {
|
||||
spec.Names[i].Name = "_"
|
||||
}
|
||||
}
|
||||
}
|
||||
case *ast.FuncDecl:
|
||||
// Blank the declared func name.
|
||||
decl.Name.Name = "_"
|
||||
|
||||
// Turn a method receiver: func (T) f(P) R {...}
|
||||
// into regular parameter: func _(T, P) R {...}
|
||||
if decl.Recv != nil {
|
||||
var params []*ast.Field
|
||||
params = append(params, decl.Recv.List...)
|
||||
params = append(params, decl.Type.Params.List...)
|
||||
decl.Type.Params.List = params
|
||||
decl.Recv = nil
|
||||
}
|
||||
}
|
||||
decls = append(decls, decl)
|
||||
}
|
||||
f.Decls = decls
|
||||
if debug {
|
||||
format.Node(os.Stderr, fset, f) // debugging
|
||||
}
|
||||
cgoFiles = append(cgoFiles, f)
|
||||
}
|
||||
if cgoFiles == nil {
|
||||
return nil, nil, nil // nothing to do (can't happen?)
|
||||
}
|
||||
|
||||
// Type-check the synthetic files.
|
||||
tc := &types.Config{
|
||||
FakeImportC: true,
|
||||
Importer: importerFunc(func(path string) (*types.Package, error) {
|
||||
return importMap[path], nil
|
||||
}),
|
||||
Sizes: sizes,
|
||||
Error: func(error) {}, // ignore errors (e.g. unused import)
|
||||
}
|
||||
|
||||
// It's tempting to record the new types in the
|
||||
// existing pass.TypesInfo, but we don't own it.
|
||||
altInfo := &types.Info{
|
||||
Types: make(map[ast.Expr]types.TypeAndValue),
|
||||
}
|
||||
tc.Check(pkg.Path(), fset, cgoFiles, altInfo)
|
||||
|
||||
return cgoFiles, altInfo, nil
|
||||
}
|
||||
|
||||
// cgoBaseType tries to look through type conversions involving
|
||||
// unsafe.Pointer to find the real type. It converts:
|
||||
// unsafe.Pointer(x) => x
|
||||
// *(*unsafe.Pointer)(unsafe.Pointer(&x)) => x
|
||||
func cgoBaseType(info *types.Info, arg ast.Expr) types.Type {
|
||||
switch arg := arg.(type) {
|
||||
case *ast.CallExpr:
|
||||
if len(arg.Args) == 1 && isUnsafePointer(info, arg.Fun) {
|
||||
return cgoBaseType(info, arg.Args[0])
|
||||
}
|
||||
case *ast.StarExpr:
|
||||
call, ok := arg.X.(*ast.CallExpr)
|
||||
if !ok || len(call.Args) != 1 {
|
||||
break
|
||||
}
|
||||
// Here arg is *f(v).
|
||||
t := info.Types[call.Fun].Type
|
||||
if t == nil {
|
||||
break
|
||||
}
|
||||
ptr, ok := t.Underlying().(*types.Pointer)
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
// Here arg is *(*p)(v)
|
||||
elem, ok := ptr.Elem().Underlying().(*types.Basic)
|
||||
if !ok || elem.Kind() != types.UnsafePointer {
|
||||
break
|
||||
}
|
||||
// Here arg is *(*unsafe.Pointer)(v)
|
||||
call, ok = call.Args[0].(*ast.CallExpr)
|
||||
if !ok || len(call.Args) != 1 {
|
||||
break
|
||||
}
|
||||
// Here arg is *(*unsafe.Pointer)(f(v))
|
||||
if !isUnsafePointer(info, call.Fun) {
|
||||
break
|
||||
}
|
||||
// Here arg is *(*unsafe.Pointer)(unsafe.Pointer(v))
|
||||
u, ok := call.Args[0].(*ast.UnaryExpr)
|
||||
if !ok || u.Op != token.AND {
|
||||
break
|
||||
}
|
||||
// Here arg is *(*unsafe.Pointer)(unsafe.Pointer(&v))
|
||||
return cgoBaseType(info, u.X)
|
||||
}
|
||||
|
||||
return info.Types[arg].Type
|
||||
}
|
||||
|
||||
// typeOKForCgoCall reports whether the type of arg is OK to pass to a
|
||||
// C function using cgo. This is not true for Go types with embedded
|
||||
// pointers. m is used to avoid infinite recursion on recursive types.
|
||||
func typeOKForCgoCall(t types.Type, m map[types.Type]bool) bool {
|
||||
if t == nil || m[t] {
|
||||
return true
|
||||
}
|
||||
m[t] = true
|
||||
switch t := t.Underlying().(type) {
|
||||
case *types.Chan, *types.Map, *types.Signature, *types.Slice:
|
||||
return false
|
||||
case *types.Pointer:
|
||||
return typeOKForCgoCall(t.Elem(), m)
|
||||
case *types.Array:
|
||||
return typeOKForCgoCall(t.Elem(), m)
|
||||
case *types.Struct:
|
||||
for i := 0; i < t.NumFields(); i++ {
|
||||
if !typeOKForCgoCall(t.Field(i).Type(), m) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func isUnsafePointer(info *types.Info, e ast.Expr) bool {
|
||||
t := info.Types[e].Type
|
||||
return t != nil && t.Underlying() == types.Typ[types.UnsafePointer]
|
||||
}
|
||||
|
||||
type importerFunc func(path string) (*types.Package, error)
|
||||
|
||||
func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }
|
||||
|
||||
// TODO(adonovan): make this a library function or method of Info.
|
||||
func imported(info *types.Info, spec *ast.ImportSpec) *types.Package {
|
||||
obj, ok := info.Implicits[spec]
|
||||
if !ok {
|
||||
obj = info.Defs[spec.Name] // renaming import
|
||||
}
|
||||
return obj.(*types.PkgName).Imported()
|
||||
}
|
||||
|
||||
// imports reports whether pkg has path among its direct imports.
|
||||
// It returns the imported package if so, or nil if not.
|
||||
// TODO(adonovan): move to analysisutil.
|
||||
func imports(pkg *types.Package, path string) *types.Package {
|
||||
for _, imp := range pkg.Imports() {
|
||||
if imp.Path() == path {
|
||||
return imp
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
117
vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go
generated
vendored
Normal file
|
@ -0,0 +1,117 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package composite defines an Analyzer that checks for unkeyed
|
||||
// composite literals.
|
||||
package composite
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/types"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||
"golang.org/x/tools/go/ast/inspector"
|
||||
)
|
||||
|
||||
const Doc = `check for unkeyed composite literals
|
||||
|
||||
This analyzer reports a diagnostic for composite literals of struct
|
||||
types imported from another package that do not use the field-keyed
|
||||
syntax. Such literals are fragile because the addition of a new field
|
||||
(even if unexported) to the struct will cause compilation to fail.
|
||||
|
||||
As an example,
|
||||
|
||||
err = &net.DNSConfigError{err}
|
||||
|
||||
should be replaced by:
|
||||
|
||||
err = &net.DNSConfigError{Err: err}
|
||||
`
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "composites",
|
||||
Doc: Doc,
|
||||
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||
RunDespiteErrors: true,
|
||||
Run: run,
|
||||
}
|
||||
|
||||
var whitelist = true
|
||||
|
||||
func init() {
|
||||
Analyzer.Flags.BoolVar(&whitelist, "whitelist", whitelist, "use composite white list; for testing only")
|
||||
}
|
||||
|
||||
// runUnkeyedLiteral checks if a composite literal is a struct literal with
|
||||
// unkeyed fields.
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
|
||||
nodeFilter := []ast.Node{
|
||||
(*ast.CompositeLit)(nil),
|
||||
}
|
||||
inspect.Preorder(nodeFilter, func(n ast.Node) {
|
||||
cl := n.(*ast.CompositeLit)
|
||||
|
||||
typ := pass.TypesInfo.Types[cl].Type
|
||||
if typ == nil {
|
||||
// cannot determine the composite literal's type; skip it
|
||||
return
|
||||
}
|
||||
typeName := typ.String()
|
||||
if whitelist && unkeyedLiteral[typeName] {
|
||||
// skip whitelisted types
|
||||
return
|
||||
}
|
||||
under := typ.Underlying()
|
||||
for {
|
||||
ptr, ok := under.(*types.Pointer)
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
under = ptr.Elem().Underlying()
|
||||
}
|
||||
if _, ok := under.(*types.Struct); !ok {
|
||||
// skip non-struct composite literals
|
||||
return
|
||||
}
|
||||
if isLocalType(pass, typ) {
|
||||
// allow unkeyed locally defined composite literal
|
||||
return
|
||||
}
|
||||
|
||||
// check if the CompositeLit contains an unkeyed field
|
||||
allKeyValue := true
|
||||
for _, e := range cl.Elts {
|
||||
if _, ok := e.(*ast.KeyValueExpr); !ok {
|
||||
allKeyValue = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if allKeyValue {
|
||||
// all the composite literal fields are keyed
|
||||
return
|
||||
}
|
||||
|
||||
pass.Reportf(cl.Pos(), "%s composite literal uses unkeyed fields", typeName)
|
||||
})
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func isLocalType(pass *analysis.Pass, typ types.Type) bool {
|
||||
switch x := typ.(type) {
|
||||
case *types.Struct:
|
||||
// struct literals are local types
|
||||
return true
|
||||
case *types.Pointer:
|
||||
return isLocalType(pass, x.Elem())
|
||||
case *types.Named:
|
||||
// names in package foo are local to foo_test too
|
||||
return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(pass.Pkg.Path(), "_test")
|
||||
}
|
||||
return false
|
||||
}
|
|
@ -2,12 +2,11 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package whitelist defines exceptions for the vet tool.
package whitelist
package composite

// UnkeyedLiteral is a white list of types in the standard packages
// unkeyedLiteral is a white list of types in the standard packages
// that are used with unkeyed literals we deem to be acceptable.
var UnkeyedLiteral = map[string]bool{
var unkeyedLiteral = map[string]bool{
	// These image and image/color struct types are frozen. We will never add fields to them.
	"image/color.Alpha16": true,
	"image/color.Alpha":   true,

@ -25,4 +24,10 @@ var UnkeyedLiteral = map[string]bool{
	"image.Uniform": true,

	"unicode.Range16": true,

	// These three structs are used in generated test main files,
	// but the generator can be trusted.
	"testing.InternalBenchmark": true,
	"testing.InternalExample":   true,
	"testing.InternalTest":      true,
}
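
Each of these vendored passes can also be run on its own with a standard go/analysis driver, independently of golangci-lint. A minimal sketch, assuming the stock singlechecker driver from x/tools (this command is illustrative and not part of this change):

```go
// Command composites is an illustrative standalone driver for the
// composites analyzer vendored above.
package main

import (
	"golang.org/x/tools/go/analysis/passes/composite"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() { singlechecker.Main(composite.Analyzer) }
```

Invoked as `composites ./...`, such a driver is also expected to surface the analyzer's own flags, for example the whitelist flag registered in init above.
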
300
vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go
generated
vendored
Normal file
|
@ -0,0 +1,300 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package copylock defines an Analyzer that checks for locks
|
||||
// erroneously passed by value.
|
||||
package copylock
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
|
||||
"golang.org/x/tools/go/ast/inspector"
|
||||
)
|
||||
|
||||
const Doc = `check for locks erroneously passed by value
|
||||
|
||||
Inadvertently copying a value containing a lock, such as sync.Mutex or
|
||||
sync.WaitGroup, may cause both copies to malfunction. Generally such
|
||||
values should be referred to through a pointer.`
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "copylocks",
|
||||
Doc: Doc,
|
||||
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||
RunDespiteErrors: true,
|
||||
Run: run,
|
||||
}
|
||||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
|
||||
nodeFilter := []ast.Node{
|
||||
(*ast.AssignStmt)(nil),
|
||||
(*ast.CallExpr)(nil),
|
||||
(*ast.CompositeLit)(nil),
|
||||
(*ast.FuncDecl)(nil),
|
||||
(*ast.FuncLit)(nil),
|
||||
(*ast.GenDecl)(nil),
|
||||
(*ast.RangeStmt)(nil),
|
||||
(*ast.ReturnStmt)(nil),
|
||||
}
|
||||
inspect.Preorder(nodeFilter, func(node ast.Node) {
|
||||
switch node := node.(type) {
|
||||
case *ast.RangeStmt:
|
||||
checkCopyLocksRange(pass, node)
|
||||
case *ast.FuncDecl:
|
||||
checkCopyLocksFunc(pass, node.Name.Name, node.Recv, node.Type)
|
||||
case *ast.FuncLit:
|
||||
checkCopyLocksFunc(pass, "func", nil, node.Type)
|
||||
case *ast.CallExpr:
|
||||
checkCopyLocksCallExpr(pass, node)
|
||||
case *ast.AssignStmt:
|
||||
checkCopyLocksAssign(pass, node)
|
||||
case *ast.GenDecl:
|
||||
checkCopyLocksGenDecl(pass, node)
|
||||
case *ast.CompositeLit:
|
||||
checkCopyLocksCompositeLit(pass, node)
|
||||
case *ast.ReturnStmt:
|
||||
checkCopyLocksReturnStmt(pass, node)
|
||||
}
|
||||
})
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// checkCopyLocksAssign checks whether an assignment
|
||||
// copies a lock.
|
||||
func checkCopyLocksAssign(pass *analysis.Pass, as *ast.AssignStmt) {
|
||||
for i, x := range as.Rhs {
|
||||
if path := lockPathRhs(pass, x); path != nil {
|
||||
pass.Reportf(x.Pos(), "assignment copies lock value to %v: %v", analysisutil.Format(pass.Fset, as.Lhs[i]), path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksGenDecl checks whether lock is copied
|
||||
// in variable declaration.
|
||||
func checkCopyLocksGenDecl(pass *analysis.Pass, gd *ast.GenDecl) {
|
||||
if gd.Tok != token.VAR {
|
||||
return
|
||||
}
|
||||
for _, spec := range gd.Specs {
|
||||
valueSpec := spec.(*ast.ValueSpec)
|
||||
for i, x := range valueSpec.Values {
|
||||
if path := lockPathRhs(pass, x); path != nil {
|
||||
pass.Reportf(x.Pos(), "variable declaration copies lock value to %v: %v", valueSpec.Names[i].Name, path)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksCompositeLit detects lock copy inside a composite literal
|
||||
func checkCopyLocksCompositeLit(pass *analysis.Pass, cl *ast.CompositeLit) {
|
||||
for _, x := range cl.Elts {
|
||||
if node, ok := x.(*ast.KeyValueExpr); ok {
|
||||
x = node.Value
|
||||
}
|
||||
if path := lockPathRhs(pass, x); path != nil {
|
||||
pass.Reportf(x.Pos(), "literal copies lock value from %v: %v", analysisutil.Format(pass.Fset, x), path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksReturnStmt detects lock copy in return statement
|
||||
func checkCopyLocksReturnStmt(pass *analysis.Pass, rs *ast.ReturnStmt) {
|
||||
for _, x := range rs.Results {
|
||||
if path := lockPathRhs(pass, x); path != nil {
|
||||
pass.Reportf(x.Pos(), "return copies lock value: %v", path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksCallExpr detects lock copy in the arguments to a function call
|
||||
func checkCopyLocksCallExpr(pass *analysis.Pass, ce *ast.CallExpr) {
|
||||
var id *ast.Ident
|
||||
switch fun := ce.Fun.(type) {
|
||||
case *ast.Ident:
|
||||
id = fun
|
||||
case *ast.SelectorExpr:
|
||||
id = fun.Sel
|
||||
}
|
||||
if fun, ok := pass.TypesInfo.Uses[id].(*types.Builtin); ok {
|
||||
switch fun.Name() {
|
||||
case "new", "len", "cap", "Sizeof":
|
||||
return
|
||||
}
|
||||
}
|
||||
for _, x := range ce.Args {
|
||||
if path := lockPathRhs(pass, x); path != nil {
|
||||
pass.Reportf(x.Pos(), "call of %s copies lock value: %v", analysisutil.Format(pass.Fset, ce.Fun), path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksFunc checks whether a function might
|
||||
// inadvertently copy a lock, by checking whether
|
||||
// its receiver, parameters, or return values
|
||||
// are locks.
|
||||
func checkCopyLocksFunc(pass *analysis.Pass, name string, recv *ast.FieldList, typ *ast.FuncType) {
|
||||
if recv != nil && len(recv.List) > 0 {
|
||||
expr := recv.List[0].Type
|
||||
if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil {
|
||||
pass.Reportf(expr.Pos(), "%s passes lock by value: %v", name, path)
|
||||
}
|
||||
}
|
||||
|
||||
if typ.Params != nil {
|
||||
for _, field := range typ.Params.List {
|
||||
expr := field.Type
|
||||
if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil {
|
||||
pass.Reportf(expr.Pos(), "%s passes lock by value: %v", name, path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Don't check typ.Results. If T has a Lock field it's OK to write
|
||||
// return T{}
|
||||
// because that is returning the zero value. Leave result checking
|
||||
// to the return statement.
|
||||
}
|
||||
|
||||
// checkCopyLocksRange checks whether a range statement
|
||||
// might inadvertently copy a lock by checking whether
|
||||
// any of the range variables are locks.
|
||||
func checkCopyLocksRange(pass *analysis.Pass, r *ast.RangeStmt) {
|
||||
checkCopyLocksRangeVar(pass, r.Tok, r.Key)
|
||||
checkCopyLocksRangeVar(pass, r.Tok, r.Value)
|
||||
}
|
||||
|
||||
func checkCopyLocksRangeVar(pass *analysis.Pass, rtok token.Token, e ast.Expr) {
|
||||
if e == nil {
|
||||
return
|
||||
}
|
||||
id, isId := e.(*ast.Ident)
|
||||
if isId && id.Name == "_" {
|
||||
return
|
||||
}
|
||||
|
||||
var typ types.Type
|
||||
if rtok == token.DEFINE {
|
||||
if !isId {
|
||||
return
|
||||
}
|
||||
obj := pass.TypesInfo.Defs[id]
|
||||
if obj == nil {
|
||||
return
|
||||
}
|
||||
typ = obj.Type()
|
||||
} else {
|
||||
typ = pass.TypesInfo.Types[e].Type
|
||||
}
|
||||
|
||||
if typ == nil {
|
||||
return
|
||||
}
|
||||
if path := lockPath(pass.Pkg, typ); path != nil {
|
||||
pass.Reportf(e.Pos(), "range var %s copies lock: %v", analysisutil.Format(pass.Fset, e), path)
|
||||
}
|
||||
}
|
||||
|
||||
type typePath []types.Type
|
||||
|
||||
// String pretty-prints a typePath.
|
||||
func (path typePath) String() string {
|
||||
n := len(path)
|
||||
var buf bytes.Buffer
|
||||
for i := range path {
|
||||
if i > 0 {
|
||||
fmt.Fprint(&buf, " contains ")
|
||||
}
|
||||
// The human-readable path is in reverse order, outermost to innermost.
|
||||
fmt.Fprint(&buf, path[n-i-1].String())
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func lockPathRhs(pass *analysis.Pass, x ast.Expr) typePath {
|
||||
if _, ok := x.(*ast.CompositeLit); ok {
|
||||
return nil
|
||||
}
|
||||
if _, ok := x.(*ast.CallExpr); ok {
|
||||
// A call may return a zero value.
|
||||
return nil
|
||||
}
|
||||
if star, ok := x.(*ast.StarExpr); ok {
|
||||
if _, ok := star.X.(*ast.CallExpr); ok {
|
||||
// A call may return a pointer to a zero value.
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return lockPath(pass.Pkg, pass.TypesInfo.Types[x].Type)
|
||||
}
|
||||
|
||||
// lockPath returns a typePath describing the location of a lock value
|
||||
// contained in typ. If there is no contained lock, it returns nil.
|
||||
func lockPath(tpkg *types.Package, typ types.Type) typePath {
|
||||
if typ == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
for {
|
||||
atyp, ok := typ.Underlying().(*types.Array)
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
typ = atyp.Elem()
|
||||
}
|
||||
|
||||
// We're only interested in the case in which the underlying
|
||||
// type is a struct. (Interfaces and pointers are safe to copy.)
|
||||
styp, ok := typ.Underlying().(*types.Struct)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// We're looking for cases in which a pointer to this type
|
||||
// is a sync.Locker, but a value is not. This differentiates
|
||||
// embedded interfaces from embedded values.
|
||||
if types.Implements(types.NewPointer(typ), lockerType) && !types.Implements(typ, lockerType) {
|
||||
return []types.Type{typ}
|
||||
}
|
||||
|
||||
// In go1.10, sync.noCopy did not implement Locker.
|
||||
// (The Unlock method was added only in CL 121876.)
|
||||
// TODO(adonovan): remove workaround when we drop go1.10.
|
||||
if named, ok := typ.(*types.Named); ok &&
|
||||
named.Obj().Name() == "noCopy" &&
|
||||
named.Obj().Pkg().Path() == "sync" {
|
||||
return []types.Type{typ}
|
||||
}
|
||||
|
||||
nfields := styp.NumFields()
|
||||
for i := 0; i < nfields; i++ {
|
||||
ftyp := styp.Field(i).Type()
|
||||
subpath := lockPath(tpkg, ftyp)
|
||||
if subpath != nil {
|
||||
return append(subpath, typ)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
var lockerType *types.Interface
|
||||
|
||||
// Construct a sync.Locker interface type.
|
||||
func init() {
|
||||
nullary := types.NewSignature(nil, nil, nil, false) // func()
|
||||
methods := []*types.Func{
|
||||
types.NewFunc(token.NoPos, nil, "Lock", nullary),
|
||||
types.NewFunc(token.NoPos, nil, "Unlock", nullary),
|
||||
}
|
||||
lockerType = types.NewInterface(methods, nil).Complete()
|
||||
}
|
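
For context, a hedged example of the kind of code the checks above flag; the package and type names are invented, and the comments only paraphrase the reports produced by checkCopyLocksFunc and checkCopyLocksAssign:

```go
package example

import "sync"

// Counter holds a sync.Mutex by value, so lockPath treats any copy of a
// Counter value as copying a lock.
type Counter struct {
	mu sync.Mutex
	n  int
}

func increment(c *Counter) {
	c.mu.Lock()
	c.n++
	c.mu.Unlock()
}

// The by-value parameter and the assignment below would both be reported:
// "passes lock by value" and "assignment copies lock value".
func snapshot(c Counter) int {
	snap := c
	return snap.n
}
```
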
225
vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go
generated
vendored
Normal file
|
@ -0,0 +1,225 @@
|
|||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package ctrlflow is an analysis that provides a syntactic
|
||||
// control-flow graph (CFG) for the body of a function.
|
||||
// It records whether a function cannot return.
|
||||
// By itself, it does not report any diagnostics.
|
||||
package ctrlflow
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/types"
|
||||
"log"
|
||||
"reflect"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||
"golang.org/x/tools/go/ast/inspector"
|
||||
"golang.org/x/tools/go/cfg"
|
||||
"golang.org/x/tools/go/types/typeutil"
|
||||
)
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "ctrlflow",
|
||||
Doc: "build a control-flow graph",
|
||||
Run: run,
|
||||
ResultType: reflect.TypeOf(new(CFGs)),
|
||||
FactTypes: []analysis.Fact{new(noReturn)},
|
||||
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||
}
|
||||
|
||||
// noReturn is a fact indicating that a function does not return.
|
||||
type noReturn struct{}
|
||||
|
||||
func (*noReturn) AFact() {}
|
||||
|
||||
func (*noReturn) String() string { return "noReturn" }
|
||||
|
||||
// A CFGs holds the control-flow graphs
|
||||
// for all the functions of the current package.
|
||||
type CFGs struct {
|
||||
defs map[*ast.Ident]types.Object // from Pass.TypesInfo.Defs
|
||||
funcDecls map[*types.Func]*declInfo
|
||||
funcLits map[*ast.FuncLit]*litInfo
|
||||
pass *analysis.Pass // transient; nil after construction
|
||||
}
|
||||
|
||||
// CFGs has two maps: funcDecls for named functions and funcLits for
|
||||
// unnamed ones. Unlike funcLits, the funcDecls map is not keyed by its
|
||||
// syntax node, *ast.FuncDecl, because callMayReturn needs to do a
|
||||
// look-up by *types.Func, and you can get from an *ast.FuncDecl to a
|
||||
// *types.Func but not the other way.
|
||||
|
||||
type declInfo struct {
|
||||
decl *ast.FuncDecl
|
||||
cfg *cfg.CFG // iff decl.Body != nil
|
||||
started bool // to break cycles
|
||||
noReturn bool
|
||||
}
|
||||
|
||||
type litInfo struct {
|
||||
cfg *cfg.CFG
|
||||
noReturn bool
|
||||
}
|
||||
|
||||
// FuncDecl returns the control-flow graph for a named function.
|
||||
// It returns nil if decl.Body==nil.
|
||||
func (c *CFGs) FuncDecl(decl *ast.FuncDecl) *cfg.CFG {
|
||||
if decl.Body == nil {
|
||||
return nil
|
||||
}
|
||||
fn := c.defs[decl.Name].(*types.Func)
|
||||
return c.funcDecls[fn].cfg
|
||||
}
|
||||
|
||||
// FuncLit returns the control-flow graph for a literal function.
|
||||
func (c *CFGs) FuncLit(lit *ast.FuncLit) *cfg.CFG {
|
||||
return c.funcLits[lit].cfg
|
||||
}
|
||||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
|
||||
// Because CFG construction consumes and produces noReturn
|
||||
// facts, CFGs for exported FuncDecls must be built before 'run'
|
||||
// returns; we cannot construct them lazily.
|
||||
// (We could build CFGs for FuncLits lazily,
|
||||
// but the benefit is marginal.)
|
||||
|
||||
// Pass 1. Map types.Funcs to ast.FuncDecls in this package.
|
||||
funcDecls := make(map[*types.Func]*declInfo) // functions and methods
|
||||
funcLits := make(map[*ast.FuncLit]*litInfo)
|
||||
|
||||
var decls []*types.Func // keys(funcDecls), in order
|
||||
var lits []*ast.FuncLit // keys(funcLits), in order
|
||||
|
||||
nodeFilter := []ast.Node{
|
||||
(*ast.FuncDecl)(nil),
|
||||
(*ast.FuncLit)(nil),
|
||||
}
|
||||
inspect.Preorder(nodeFilter, func(n ast.Node) {
|
||||
switch n := n.(type) {
|
||||
case *ast.FuncDecl:
|
||||
fn := pass.TypesInfo.Defs[n.Name].(*types.Func)
|
||||
funcDecls[fn] = &declInfo{decl: n}
|
||||
decls = append(decls, fn)
|
||||
|
||||
case *ast.FuncLit:
|
||||
funcLits[n] = new(litInfo)
|
||||
lits = append(lits, n)
|
||||
}
|
||||
})
|
||||
|
||||
c := &CFGs{
|
||||
defs: pass.TypesInfo.Defs,
|
||||
funcDecls: funcDecls,
|
||||
funcLits: funcLits,
|
||||
pass: pass,
|
||||
}
|
||||
|
||||
// Pass 2. Build CFGs.
|
||||
|
||||
// Build CFGs for named functions.
|
||||
// Cycles in the static call graph are broken
|
||||
// arbitrarily but deterministically.
|
||||
// We create noReturn facts as discovered.
|
||||
for _, fn := range decls {
|
||||
c.buildDecl(fn, funcDecls[fn])
|
||||
}
|
||||
|
||||
// Build CFGs for literal functions.
|
||||
// These aren't relevant to facts (since they aren't named)
|
||||
// but are required for the CFGs.FuncLit API.
|
||||
for _, lit := range lits {
|
||||
li := funcLits[lit]
|
||||
if li.cfg == nil {
|
||||
li.cfg = cfg.New(lit.Body, c.callMayReturn)
|
||||
if !hasReachableReturn(li.cfg) {
|
||||
li.noReturn = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// All CFGs are now built.
|
||||
c.pass = nil
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
// di.cfg may be nil on return.
|
||||
func (c *CFGs) buildDecl(fn *types.Func, di *declInfo) {
|
||||
// buildDecl may call itself recursively for the same function,
|
||||
// because cfg.New is passed the callMayReturn method, which
|
||||
// builds the CFG of the callee, leading to recursion.
|
||||
// The buildDecl call tree thus resembles the static call graph.
|
||||
// We mark each node when we start working on it to break cycles.
|
||||
|
||||
if !di.started { // break cycle
|
||||
di.started = true
|
||||
|
||||
if isIntrinsicNoReturn(fn) {
|
||||
di.noReturn = true
|
||||
}
|
||||
if di.decl.Body != nil {
|
||||
di.cfg = cfg.New(di.decl.Body, c.callMayReturn)
|
||||
if !hasReachableReturn(di.cfg) {
|
||||
di.noReturn = true
|
||||
}
|
||||
}
|
||||
if di.noReturn {
|
||||
c.pass.ExportObjectFact(fn, new(noReturn))
|
||||
}
|
||||
|
||||
// debugging
|
||||
if false {
|
||||
log.Printf("CFG for %s:\n%s (noreturn=%t)\n", fn, di.cfg.Format(c.pass.Fset), di.noReturn)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// callMayReturn reports whether the called function may return.
|
||||
// It is passed to the CFG builder.
|
||||
func (c *CFGs) callMayReturn(call *ast.CallExpr) (r bool) {
|
||||
if id, ok := call.Fun.(*ast.Ident); ok && c.pass.TypesInfo.Uses[id] == panicBuiltin {
|
||||
return false // panic never returns
|
||||
}
|
||||
|
||||
// Is this a static call?
|
||||
fn := typeutil.StaticCallee(c.pass.TypesInfo, call)
|
||||
if fn == nil {
|
||||
return true // callee not statically known; be conservative
|
||||
}
|
||||
|
||||
// Function or method declared in this package?
|
||||
if di, ok := c.funcDecls[fn]; ok {
|
||||
c.buildDecl(fn, di)
|
||||
return !di.noReturn
|
||||
}
|
||||
|
||||
// Not declared in this package.
|
||||
// Is there a fact from another package?
|
||||
return !c.pass.ImportObjectFact(fn, new(noReturn))
|
||||
}
|
||||
|
||||
var panicBuiltin = types.Universe.Lookup("panic").(*types.Builtin)
|
||||
|
||||
func hasReachableReturn(g *cfg.CFG) bool {
|
||||
for _, b := range g.Blocks {
|
||||
if b.Live && b.Return() != nil {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// isIntrinsicNoReturn reports whether a function intrinsically never
|
||||
// returns because it stops execution of the calling thread.
|
||||
// It is the base case in the recursion.
|
||||
func isIntrinsicNoReturn(fn *types.Func) bool {
|
||||
// Add functions here as the need arises, but don't allocate memory.
|
||||
path, name := fn.Pkg().Path(), fn.Name()
|
||||
return path == "syscall" && (name == "Exit" || name == "ExitProcess" || name == "ExitThread") ||
|
||||
path == "runtime" && name == "Goexit"
|
||||
}
|
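
Like buildssa, ctrlflow only produces a result. A minimal, hypothetical consumer (not part of this change; the names are made up) would list it in Requires and query the CFGs result for each function declaration:

```go
// Package example is a hypothetical consumer of the ctrlflow result.
package example

import (
	"go/ast"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/ctrlflow"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/ast/inspector"
)

var Analyzer = &analysis.Analyzer{
	Name:     "cfgconsumer",
	Doc:      "illustrative consumer of the CFGs built by the ctrlflow pass",
	Requires: []*analysis.Analyzer{inspect.Analyzer, ctrlflow.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs)
	ins := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	ins.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(n ast.Node) {
		decl := n.(*ast.FuncDecl)
		if g := cfgs.FuncDecl(decl); g != nil {
			_ = g.Blocks // inspect the control-flow graph of this function
		}
	})
	return nil, nil
}
```
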
177
vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go
generated
vendored
Normal file
|
@ -0,0 +1,177 @@
|
|||
// Copyright 2016 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package httpresponse defines an Analyzer that checks for mistakes
|
||||
// using HTTP responses.
|
||||
package httpresponse
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/types"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||
"golang.org/x/tools/go/ast/inspector"
|
||||
)
|
||||
|
||||
const Doc = `check for mistakes using HTTP responses
|
||||
|
||||
A common mistake when using the net/http package is to defer a function
|
||||
call to close the http.Response Body before checking the error that
|
||||
determines whether the response is valid:
|
||||
|
||||
resp, err := http.Head(url)
|
||||
defer resp.Body.Close()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
// (defer statement belongs here)
|
||||
|
||||
This checker helps uncover latent nil dereference bugs by reporting a
|
||||
diagnostic for such mistakes.`
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "httpresponse",
|
||||
Doc: Doc,
|
||||
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||
Run: run,
|
||||
}
|
||||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
|
||||
// Fast path: if the package doesn't import net/http,
|
||||
// skip the traversal.
|
||||
if !imports(pass.Pkg, "net/http") {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
nodeFilter := []ast.Node{
|
||||
(*ast.CallExpr)(nil),
|
||||
}
|
||||
inspect.WithStack(nodeFilter, func(n ast.Node, push bool, stack []ast.Node) bool {
|
||||
if !push {
|
||||
return true
|
||||
}
|
||||
call := n.(*ast.CallExpr)
|
||||
if !isHTTPFuncOrMethodOnClient(pass.TypesInfo, call) {
|
||||
return true // the function call is not related to this check.
|
||||
}
|
||||
|
||||
// Find the innermost containing block, and get the list
|
||||
// of statements starting with the one containing call.
|
||||
stmts := restOfBlock(stack)
|
||||
if len(stmts) < 2 {
|
||||
return true // the call to the http function is the last statement of the block.
|
||||
}
|
||||
|
||||
asg, ok := stmts[0].(*ast.AssignStmt)
|
||||
if !ok {
|
||||
return true // the first statement is not assignment.
|
||||
}
|
||||
resp := rootIdent(asg.Lhs[0])
|
||||
if resp == nil {
|
||||
return true // could not find the http.Response in the assignment.
|
||||
}
|
||||
|
||||
def, ok := stmts[1].(*ast.DeferStmt)
|
||||
if !ok {
|
||||
return true // the following statement is not a defer.
|
||||
}
|
||||
root := rootIdent(def.Call.Fun)
|
||||
if root == nil {
|
||||
return true // could not find the receiver of the defer call.
|
||||
}
|
||||
|
||||
if resp.Obj == root.Obj {
|
||||
pass.Reportf(root.Pos(), "using %s before checking for errors", resp.Name)
|
||||
}
|
||||
return true
|
||||
})
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// isHTTPFuncOrMethodOnClient checks whether the given call expression is on
|
||||
// either a function of the net/http package or a method of http.Client that
|
||||
// returns (*http.Response, error).
|
||||
func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool {
|
||||
fun, _ := expr.Fun.(*ast.SelectorExpr)
|
||||
sig, _ := info.Types[fun].Type.(*types.Signature)
|
||||
if sig == nil {
|
||||
return false // the call is not of the form x.f()
|
||||
}
|
||||
|
||||
res := sig.Results()
|
||||
if res.Len() != 2 {
|
||||
return false // the function called does not return two values.
|
||||
}
|
||||
if ptr, ok := res.At(0).Type().(*types.Pointer); !ok || !isNamedType(ptr.Elem(), "net/http", "Response") {
|
||||
return false // the first return type is not *http.Response.
|
||||
}
|
||||
|
||||
errorType := types.Universe.Lookup("error").Type()
|
||||
if !types.Identical(res.At(1).Type(), errorType) {
|
||||
return false // the second return type is not error
|
||||
}
|
||||
|
||||
typ := info.Types[fun.X].Type
|
||||
if typ == nil {
|
||||
id, ok := fun.X.(*ast.Ident)
|
||||
return ok && id.Name == "http" // function in net/http package.
|
||||
}
|
||||
|
||||
if isNamedType(typ, "net/http", "Client") {
|
||||
return true // method on http.Client.
|
||||
}
|
||||
ptr, ok := typ.(*types.Pointer)
|
||||
return ok && isNamedType(ptr.Elem(), "net/http", "Client") // method on *http.Client.
|
||||
}
|
||||
|
||||
// restOfBlock, given a traversal stack, finds the innermost containing
|
||||
// block and returns the suffix of its statements starting with the
|
||||
// current node (the last element of stack).
|
||||
func restOfBlock(stack []ast.Node) []ast.Stmt {
|
||||
for i := len(stack) - 1; i >= 0; i-- {
|
||||
if b, ok := stack[i].(*ast.BlockStmt); ok {
|
||||
for j, v := range b.List {
|
||||
if v == stack[i+1] {
|
||||
return b.List[j:]
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// rootIdent finds the root identifier x in a chain of selections x.y.z, or nil if not found.
|
||||
func rootIdent(n ast.Node) *ast.Ident {
|
||||
switch n := n.(type) {
|
||||
case *ast.SelectorExpr:
|
||||
return rootIdent(n.X)
|
||||
case *ast.Ident:
|
||||
return n
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// isNamedType reports whether t is the named type path.name.
|
||||
func isNamedType(t types.Type, path, name string) bool {
|
||||
n, ok := t.(*types.Named)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
obj := n.Obj()
|
||||
return obj.Name() == name && obj.Pkg() != nil && obj.Pkg().Path() == path
|
||||
}
|
||||
|
||||
func imports(pkg *types.Package, path string) bool {
|
||||
for _, imp := range pkg.Imports() {
|
||||
if imp.Path() == path {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
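
The corrected ordering of the pattern described in the Doc string above, as a small sketch (the function and its URL handling are illustrative only):

```go
package example

import (
	"log"
	"net/http"
)

func head(url string) {
	resp, err := http.Head(url)
	if err != nil {
		log.Fatal(err)
	}
	// Deferring the Close only after the error check guarantees resp is non-nil.
	defer resp.Body.Close()
	// ... use resp ...
}
```
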
49
vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go
generated
vendored
Normal file
@ -0,0 +1,49 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package inspect defines an Analyzer that provides an AST inspector
// (golang.org/x/tools/go/ast/inspect.Inspect) for the syntax trees of a
// package. It is only a building block for other analyzers.
//
// Example of use in another analysis:
//
//	import (
//		"golang.org/x/tools/go/analysis"
//		"golang.org/x/tools/go/analysis/passes/inspect"
//		"golang.org/x/tools/go/ast/inspector"
//	)
//
//	var Analyzer = &analysis.Analyzer{
//		...
//		Requires: reflect.TypeOf(new(inspect.Analyzer)),
//	}
//
//	func run(pass *analysis.Pass) (interface{}, error) {
//		inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
//		inspect.Preorder(nil, func(n ast.Node) {
//			...
//		})
//		return nil
//	}
//
package inspect

import (
	"reflect"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/ast/inspector"
)

var Analyzer = &analysis.Analyzer{
	Name:             "inspect",
	Doc:              "optimize AST traversal for later passes",
	Run:              run,
	RunDespiteErrors: true,
	ResultType:       reflect.TypeOf(new(inspector.Inspector)),
}

func run(pass *analysis.Pass) (interface{}, error) {
	return inspector.New(pass.Files), nil
}
106
vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go
generated
vendored
Normal file
@ -0,0 +1,106 @@
// Package analysisutil defines various helper functions
// used by two or more packages beneath go/analysis.
package analysisutil

import (
	"bytes"
	"go/ast"
	"go/printer"
	"go/token"
	"go/types"
	"io/ioutil"
)

// Format returns a string representation of the expression.
func Format(fset *token.FileSet, x ast.Expr) string {
	var b bytes.Buffer
	printer.Fprint(&b, fset, x)
	return b.String()
}

// HasSideEffects reports whether evaluation of e has side effects.
func HasSideEffects(info *types.Info, e ast.Expr) bool {
	safe := true
	ast.Inspect(e, func(node ast.Node) bool {
		switch n := node.(type) {
		case *ast.CallExpr:
			typVal := info.Types[n.Fun]
			switch {
			case typVal.IsType():
				// Type conversion, which is safe.
			case typVal.IsBuiltin():
				// Builtin func, conservatively assumed to not
				// be safe for now.
				safe = false
				return false
			default:
				// A non-builtin func or method call.
				// Conservatively assume that all of them have
				// side effects for now.
				safe = false
				return false
			}
		case *ast.UnaryExpr:
			if n.Op == token.ARROW {
				safe = false
				return false
			}
		}
		return true
	})
	return !safe
}

// Unparen returns e with any enclosing parentheses stripped.
func Unparen(e ast.Expr) ast.Expr {
	for {
		p, ok := e.(*ast.ParenExpr)
		if !ok {
			return e
		}
		e = p.X
	}
}

// ReadFile reads a file and adds it to the FileSet
// so that we can report errors against it using lineStart.
func ReadFile(fset *token.FileSet, filename string) ([]byte, *token.File, error) {
	content, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, nil, err
	}
	tf := fset.AddFile(filename, -1, len(content))
	tf.SetLinesForContent(content)
	return content, tf, nil
}

// LineStart returns the position of the start of the specified line
// within file f, or NoPos if there is no line of that number.
func LineStart(f *token.File, line int) token.Pos {
	// Use binary search to find the start offset of this line.
	//
	// TODO(adonovan): eventually replace this function with the
	// simpler and more efficient (*go/token.File).LineStart, added
	// in go1.12.

	min := 0        // inclusive
	max := f.Size() // exclusive
	for {
		offset := (min + max) / 2
		pos := f.Pos(offset)
		posn := f.Position(pos)
		if posn.Line == line {
			return pos - (token.Pos(posn.Column) - 1)
		}

		if min+1 >= max {
			return token.NoPos
		}

		if posn.Line < line {
			min = offset
		} else {
			max = offset
		}
	}
}
vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go (generated, vendored, new file, 130 lines)
@@ -0,0 +1,130 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package loopclosure defines an Analyzer that checks for references to
// enclosing loop variables from within nested functions.
package loopclosure

import (
	"go/ast"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/ast/inspector"
)

// TODO(adonovan): also report an error for the following structure,
// which is often used to ensure that deferred calls do not accumulate
// in a loop:
//
//	for i, x := range c {
//		func() {
//			...reference to i or x...
//		}()
//	}

const Doc = `check references to loop variables from within nested functions

This analyzer checks for references to loop variables from within a
function literal inside the loop body. It checks only instances where
the function literal is called in a defer or go statement that is the
last statement in the loop body, as otherwise we would need whole
program analysis.

For example:

	for i, v := range s {
		go func() {
			println(i, v) // not what you might expect
		}()
	}

See: https://golang.org/doc/go_faq.html#closures_and_goroutines`

var Analyzer = &analysis.Analyzer{
	Name:     "loopclosure",
	Doc:      Doc,
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	nodeFilter := []ast.Node{
		(*ast.RangeStmt)(nil),
		(*ast.ForStmt)(nil),
	}
	inspect.Preorder(nodeFilter, func(n ast.Node) {
		// Find the variables updated by the loop statement.
		var vars []*ast.Ident
		addVar := func(expr ast.Expr) {
			if id, ok := expr.(*ast.Ident); ok {
				vars = append(vars, id)
			}
		}
		var body *ast.BlockStmt
		switch n := n.(type) {
		case *ast.RangeStmt:
			body = n.Body
			addVar(n.Key)
			addVar(n.Value)
		case *ast.ForStmt:
			body = n.Body
			switch post := n.Post.(type) {
			case *ast.AssignStmt:
				// e.g. for p = head; p != nil; p = p.next
				for _, lhs := range post.Lhs {
					addVar(lhs)
				}
			case *ast.IncDecStmt:
				// e.g. for i := 0; i < n; i++
				addVar(post.X)
			}
		}
		if vars == nil {
			return
		}

		// Inspect a go or defer statement
		// if it's the last one in the loop body.
		// (We give up if there are following statements,
		// because it's hard to prove go isn't followed by wait,
		// or defer by return.)
		if len(body.List) == 0 {
			return
		}
		var last *ast.CallExpr
		switch s := body.List[len(body.List)-1].(type) {
		case *ast.GoStmt:
			last = s.Call
		case *ast.DeferStmt:
			last = s.Call
		default:
			return
		}
		lit, ok := last.Fun.(*ast.FuncLit)
		if !ok {
			return
		}
		ast.Inspect(lit.Body, func(n ast.Node) bool {
			id, ok := n.(*ast.Ident)
			if !ok || id.Obj == nil {
				return true
			}
			if pass.TypesInfo.Types[id].Type == nil {
				// Not referring to a variable (e.g. struct field name)
				return true
			}
			for _, v := range vars {
				if v.Obj == id.Obj {
					pass.Reportf(id.Pos(), "loop variable %s captured by func literal",
						id.Name)
				}
			}
			return true
		})
	})
	return nil, nil
}
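For illustration only (not part of this commit): a minimal sketch of the pattern loopclosure reports, next to the usual fix of rebinding the loop variables per iteration. The function and variable names are made up; the comment mirrors the Reportf message above.

```go
package example

import (
	"fmt"
	"sync"
)

// leaky shows the pattern that is reported: the go statement is the last
// statement of the loop body and its closure captures the loop variables,
// so every goroutine may observe the values of the final iteration.
func leaky(items []string) {
	var wg sync.WaitGroup
	for i, v := range items {
		wg.Add(1)
		go func() { // reported: loop variable i (and v) captured by func literal
			defer wg.Done()
			fmt.Println(i, v)
		}()
	}
	wg.Wait()
}

// fixed rebinds the loop variables inside the body; the closure then captures
// the per-iteration copies and nothing is reported.
func fixed(items []string) {
	var wg sync.WaitGroup
	for i, v := range items {
		i, v := i, v // fresh variables for this iteration
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println(i, v)
		}()
	}
	wg.Wait()
}
```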
vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go (generated, vendored, new file, 315 lines)
@@ -0,0 +1,315 @@
|
|||
// Copyright 2016 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package lostcancel defines an Analyzer that checks for failure to
|
||||
// call a context cancelation function.
|
||||
package lostcancel
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/types"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/ctrlflow"
|
||||
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||
"golang.org/x/tools/go/ast/inspector"
|
||||
"golang.org/x/tools/go/cfg"
|
||||
)
|
||||
|
||||
const Doc = `check cancel func returned by context.WithCancel is called
|
||||
|
||||
The cancelation function returned by context.WithCancel, WithTimeout,
|
||||
and WithDeadline must be called or the new context will remain live
|
||||
until its parent context is cancelled.
|
||||
(The background context is never cancelled.)`
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "lostcancel",
|
||||
Doc: Doc,
|
||||
Run: run,
|
||||
Requires: []*analysis.Analyzer{
|
||||
inspect.Analyzer,
|
||||
ctrlflow.Analyzer,
|
||||
},
|
||||
}
|
||||
|
||||
const debug = false
|
||||
|
||||
var contextPackage = "context"
|
||||
|
||||
// checkLostCancel reports a failure to the call the cancel function
|
||||
// returned by context.WithCancel, either because the variable was
|
||||
// assigned to the blank identifier, or because there exists a
|
||||
// control-flow path from the call to a return statement and that path
|
||||
// does not "use" the cancel function. Any reference to the variable
|
||||
// counts as a use, even within a nested function literal.
|
||||
//
|
||||
// checkLostCancel analyzes a single named or literal function.
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
// Fast path: bypass check if file doesn't use context.WithCancel.
|
||||
if !hasImport(pass.Pkg, contextPackage) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// Call runFunc for each Func{Decl,Lit}.
|
||||
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
nodeTypes := []ast.Node{
|
||||
(*ast.FuncLit)(nil),
|
||||
(*ast.FuncDecl)(nil),
|
||||
}
|
||||
inspect.Preorder(nodeTypes, func(n ast.Node) {
|
||||
runFunc(pass, n)
|
||||
})
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func runFunc(pass *analysis.Pass, node ast.Node) {
|
||||
// Maps each cancel variable to its defining ValueSpec/AssignStmt.
|
||||
cancelvars := make(map[*types.Var]ast.Node)
|
||||
|
||||
// TODO(adonovan): opt: refactor to make a single pass
|
||||
// over the AST using inspect.WithStack and node types
|
||||
// {FuncDecl,FuncLit,CallExpr,SelectorExpr}.
|
||||
|
||||
// Find the set of cancel vars to analyze.
|
||||
stack := make([]ast.Node, 0, 32)
|
||||
ast.Inspect(node, func(n ast.Node) bool {
|
||||
switch n.(type) {
|
||||
case *ast.FuncLit:
|
||||
if len(stack) > 0 {
|
||||
return false // don't stray into nested functions
|
||||
}
|
||||
case nil:
|
||||
stack = stack[:len(stack)-1] // pop
|
||||
return true
|
||||
}
|
||||
stack = append(stack, n) // push
|
||||
|
||||
// Look for [{AssignStmt,ValueSpec} CallExpr SelectorExpr]:
|
||||
//
|
||||
// ctx, cancel := context.WithCancel(...)
|
||||
// ctx, cancel = context.WithCancel(...)
|
||||
// var ctx, cancel = context.WithCancel(...)
|
||||
//
|
||||
if !isContextWithCancel(pass.TypesInfo, n) || !isCall(stack[len(stack)-2]) {
|
||||
return true
|
||||
}
|
||||
var id *ast.Ident // id of cancel var
|
||||
stmt := stack[len(stack)-3]
|
||||
switch stmt := stmt.(type) {
|
||||
case *ast.ValueSpec:
|
||||
if len(stmt.Names) > 1 {
|
||||
id = stmt.Names[1]
|
||||
}
|
||||
case *ast.AssignStmt:
|
||||
if len(stmt.Lhs) > 1 {
|
||||
id, _ = stmt.Lhs[1].(*ast.Ident)
|
||||
}
|
||||
}
|
||||
if id != nil {
|
||||
if id.Name == "_" {
|
||||
pass.Reportf(id.Pos(),
|
||||
"the cancel function returned by context.%s should be called, not discarded, to avoid a context leak",
|
||||
n.(*ast.SelectorExpr).Sel.Name)
|
||||
} else if v, ok := pass.TypesInfo.Uses[id].(*types.Var); ok {
|
||||
cancelvars[v] = stmt
|
||||
} else if v, ok := pass.TypesInfo.Defs[id].(*types.Var); ok {
|
||||
cancelvars[v] = stmt
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
if len(cancelvars) == 0 {
|
||||
return // no need to inspect CFG
|
||||
}
|
||||
|
||||
// Obtain the CFG.
|
||||
cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs)
|
||||
var g *cfg.CFG
|
||||
var sig *types.Signature
|
||||
switch node := node.(type) {
|
||||
case *ast.FuncDecl:
|
||||
sig, _ = pass.TypesInfo.Defs[node.Name].Type().(*types.Signature)
|
||||
if node.Name.Name == "main" && sig.Recv() == nil && pass.Pkg.Name() == "main" {
|
||||
// Returning from main.main terminates the process,
|
||||
// so there's no need to cancel contexts.
|
||||
return
|
||||
}
|
||||
g = cfgs.FuncDecl(node)
|
||||
|
||||
case *ast.FuncLit:
|
||||
sig, _ = pass.TypesInfo.Types[node.Type].Type.(*types.Signature)
|
||||
g = cfgs.FuncLit(node)
|
||||
}
|
||||
if sig == nil {
|
||||
return // missing type information
|
||||
}
|
||||
|
||||
// Print CFG.
|
||||
if debug {
|
||||
fmt.Println(g.Format(pass.Fset))
|
||||
}
|
||||
|
||||
// Examine the CFG for each variable in turn.
|
||||
// (It would be more efficient to analyze all cancelvars in a
|
||||
// single pass over the AST, but seldom is there more than one.)
|
||||
for v, stmt := range cancelvars {
|
||||
if ret := lostCancelPath(pass, g, v, stmt, sig); ret != nil {
|
||||
lineno := pass.Fset.Position(stmt.Pos()).Line
|
||||
pass.Reportf(stmt.Pos(), "the %s function is not used on all paths (possible context leak)", v.Name())
|
||||
pass.Reportf(ret.Pos(), "this return statement may be reached without using the %s var defined on line %d", v.Name(), lineno)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func isCall(n ast.Node) bool { _, ok := n.(*ast.CallExpr); return ok }
|
||||
|
||||
func hasImport(pkg *types.Package, path string) bool {
|
||||
for _, imp := range pkg.Imports() {
|
||||
if imp.Path() == path {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// isContextWithCancel reports whether n is one of the qualified identifiers
|
||||
// context.With{Cancel,Timeout,Deadline}.
|
||||
func isContextWithCancel(info *types.Info, n ast.Node) bool {
|
||||
sel, ok := n.(*ast.SelectorExpr)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
switch sel.Sel.Name {
|
||||
case "WithCancel", "WithTimeout", "WithDeadline":
|
||||
default:
|
||||
return false
|
||||
}
|
||||
if x, ok := sel.X.(*ast.Ident); ok {
|
||||
if pkgname, ok := info.Uses[x].(*types.PkgName); ok {
|
||||
return pkgname.Imported().Path() == contextPackage
|
||||
}
|
||||
// Import failed, so we can't check package path.
|
||||
// Just check the local package name (heuristic).
|
||||
return x.Name == "context"
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// lostCancelPath finds a path through the CFG, from stmt (which defines
|
||||
// the 'cancel' variable v) to a return statement, that doesn't "use" v.
|
||||
// If it finds one, it returns the return statement (which may be synthetic).
|
||||
// sig is the function's type, if known.
|
||||
func lostCancelPath(pass *analysis.Pass, g *cfg.CFG, v *types.Var, stmt ast.Node, sig *types.Signature) *ast.ReturnStmt {
|
||||
vIsNamedResult := sig != nil && tupleContains(sig.Results(), v)
|
||||
|
||||
// uses reports whether stmts contain a "use" of variable v.
|
||||
uses := func(pass *analysis.Pass, v *types.Var, stmts []ast.Node) bool {
|
||||
found := false
|
||||
for _, stmt := range stmts {
|
||||
ast.Inspect(stmt, func(n ast.Node) bool {
|
||||
switch n := n.(type) {
|
||||
case *ast.Ident:
|
||||
if pass.TypesInfo.Uses[n] == v {
|
||||
found = true
|
||||
}
|
||||
case *ast.ReturnStmt:
|
||||
// A naked return statement counts as a use
|
||||
// of the named result variables.
|
||||
if n.Results == nil && vIsNamedResult {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
return !found
|
||||
})
|
||||
}
|
||||
return found
|
||||
}
|
||||
|
||||
// blockUses computes "uses" for each block, caching the result.
|
||||
memo := make(map[*cfg.Block]bool)
|
||||
blockUses := func(pass *analysis.Pass, v *types.Var, b *cfg.Block) bool {
|
||||
res, ok := memo[b]
|
||||
if !ok {
|
||||
res = uses(pass, v, b.Nodes)
|
||||
memo[b] = res
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// Find the var's defining block in the CFG,
|
||||
// plus the rest of the statements of that block.
|
||||
var defblock *cfg.Block
|
||||
var rest []ast.Node
|
||||
outer:
|
||||
for _, b := range g.Blocks {
|
||||
for i, n := range b.Nodes {
|
||||
if n == stmt {
|
||||
defblock = b
|
||||
rest = b.Nodes[i+1:]
|
||||
break outer
|
||||
}
|
||||
}
|
||||
}
|
||||
if defblock == nil {
|
||||
panic("internal error: can't find defining block for cancel var")
|
||||
}
|
||||
|
||||
// Is v "used" in the remainder of its defining block?
|
||||
if uses(pass, v, rest) {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Does the defining block return without using v?
|
||||
if ret := defblock.Return(); ret != nil {
|
||||
return ret
|
||||
}
|
||||
|
||||
// Search the CFG depth-first for a path, from defblock to a
|
||||
// return block, in which v is never "used".
|
||||
seen := make(map[*cfg.Block]bool)
|
||||
var search func(blocks []*cfg.Block) *ast.ReturnStmt
|
||||
search = func(blocks []*cfg.Block) *ast.ReturnStmt {
|
||||
for _, b := range blocks {
|
||||
if seen[b] {
|
||||
continue
|
||||
}
|
||||
seen[b] = true
|
||||
|
||||
// Prune the search if the block uses v.
|
||||
if blockUses(pass, v, b) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Found path to return statement?
|
||||
if ret := b.Return(); ret != nil {
|
||||
if debug {
|
||||
fmt.Printf("found path to return in block %s\n", b)
|
||||
}
|
||||
return ret // found
|
||||
}
|
||||
|
||||
// Recur
|
||||
if ret := search(b.Succs); ret != nil {
|
||||
if debug {
|
||||
fmt.Printf(" from block %s\n", b)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return search(defblock.Succs)
|
||||
}
|
||||
|
||||
func tupleContains(tuple *types.Tuple, v *types.Var) bool {
|
||||
for i := 0; i < tuple.Len(); i++ {
|
||||
if tuple.At(i) == v {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
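For illustration only (not part of this commit): a minimal sketch of the two situations lostcancel reports, and the usual `defer cancel()` fix. Names are made up; the comments paraphrase the Reportf messages above.

```go
package example

import (
	"context"
	"time"
)

// discarded drops the CancelFunc, so the derived context can never be
// released early; reported as "the cancel function ... should be called,
// not discarded, to avoid a context leak".
func discarded(parent context.Context) context.Context {
	ctx, _ := context.WithTimeout(parent, time.Second) // reported
	return ctx
}

// maybeLeaked has a return path that never uses cancel; reported as
// "the cancel function is not used on all paths (possible context leak)".
func maybeLeaked(parent context.Context, fast bool) error {
	ctx, cancel := context.WithCancel(parent)
	if fast {
		return nil // reported: reached without using cancel
	}
	defer cancel()
	<-ctx.Done()
	return ctx.Err()
}

// fixed defers cancel immediately after creating the context.
func fixed(parent context.Context) error {
	ctx, cancel := context.WithTimeout(parent, time.Second)
	defer cancel()
	<-ctx.Done()
	return ctx.Err()
}
```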
vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go (generated, vendored, new file, 74 lines)
@@ -0,0 +1,74 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package nilfunc defines an Analyzer that checks for useless
// comparisons against nil.
package nilfunc

import (
	"go/ast"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/ast/inspector"
)

const Doc = `check for useless comparisons between functions and nil

A useless comparison is one like f == nil as opposed to f() == nil.`

var Analyzer = &analysis.Analyzer{
	Name:     "nilfunc",
	Doc:      Doc,
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	nodeFilter := []ast.Node{
		(*ast.BinaryExpr)(nil),
	}
	inspect.Preorder(nodeFilter, func(n ast.Node) {
		e := n.(*ast.BinaryExpr)

		// Only want == or != comparisons.
		if e.Op != token.EQL && e.Op != token.NEQ {
			return
		}

		// Only want comparisons with a nil identifier on one side.
		var e2 ast.Expr
		switch {
		case pass.TypesInfo.Types[e.X].IsNil():
			e2 = e.Y
		case pass.TypesInfo.Types[e.Y].IsNil():
			e2 = e.X
		default:
			return
		}

		// Only want identifiers or selector expressions.
		var obj types.Object
		switch v := e2.(type) {
		case *ast.Ident:
			obj = pass.TypesInfo.Uses[v]
		case *ast.SelectorExpr:
			obj = pass.TypesInfo.Uses[v.Sel]
		default:
			return
		}

		// Only want functions.
		if _, ok := obj.(*types.Func); !ok {
			return
		}

		pass.Reportf(e.Pos(), "comparison of function %v %v nil is always %v", obj.Name(), e.Op, e.Op == token.NEQ)
	})
	return nil, nil
}
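For illustration only (not part of this commit): the kind of comparison nilfunc reports. A declared function is never nil, so the check below is always true; the caller almost certainly meant to call the function instead.

```go
package example

func healthy() bool { return true }

func check() bool {
	if healthy != nil { // reported: comparison of function healthy != nil is always true
		return true
	}
	return false
}

func checkFixed() bool {
	return healthy() // compare the result of the call instead
}
```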
vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go (generated, vendored, new file, 1026 lines)
File diff suppressed because it is too large.
vendor/golang.org/x/tools/go/analysis/passes/printf/types.go (generated, vendored, new file, 236 lines)
@@ -0,0 +1,236 @@
|
|||
package printf
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/types"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
|
||||
)
|
||||
|
||||
var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
|
||||
|
||||
// matchArgType reports an error if printf verb t is not appropriate
|
||||
// for operand arg.
|
||||
//
|
||||
// typ is used only for recursive calls; external callers must supply nil.
|
||||
//
|
||||
// (Recursion arises from the compound types {map,chan,slice} which
|
||||
// may be printed with %d etc. if that is appropriate for their element
|
||||
// types.)
|
||||
func matchArgType(pass *analysis.Pass, t printfArgType, typ types.Type, arg ast.Expr) bool {
|
||||
return matchArgTypeInternal(pass, t, typ, arg, make(map[types.Type]bool))
|
||||
}
|
||||
|
||||
// matchArgTypeInternal is the internal version of matchArgType. It carries a map
|
||||
// remembering what types are in progress so we don't recur when faced with recursive
|
||||
// types or mutually recursive types.
|
||||
func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool {
|
||||
// %v, %T accept any argument type.
|
||||
if t == anyType {
|
||||
return true
|
||||
}
|
||||
if typ == nil {
|
||||
// external call
|
||||
typ = pass.TypesInfo.Types[arg].Type
|
||||
if typ == nil {
|
||||
return true // probably a type check problem
|
||||
}
|
||||
}
|
||||
// If the type implements fmt.Formatter, we have nothing to check.
|
||||
if isFormatter(typ) {
|
||||
return true
|
||||
}
|
||||
// If we can use a string, might arg (dynamically) implement the Stringer or Error interface?
|
||||
if t&argString != 0 && isConvertibleToString(pass, typ) {
|
||||
return true
|
||||
}
|
||||
|
||||
typ = typ.Underlying()
|
||||
if inProgress[typ] {
|
||||
// We're already looking at this type. The call that started it will take care of it.
|
||||
return true
|
||||
}
|
||||
inProgress[typ] = true
|
||||
|
||||
switch typ := typ.(type) {
|
||||
case *types.Signature:
|
||||
return t == argPointer
|
||||
|
||||
case *types.Map:
|
||||
return t == argPointer ||
|
||||
// Recur: map[int]int matches %d.
|
||||
(matchArgTypeInternal(pass, t, typ.Key(), arg, inProgress) && matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress))
|
||||
|
||||
case *types.Chan:
|
||||
return t&argPointer != 0
|
||||
|
||||
case *types.Array:
|
||||
// Same as slice.
|
||||
if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
|
||||
return true // %s matches []byte
|
||||
}
|
||||
// Recur: []int matches %d.
|
||||
return matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress)
|
||||
|
||||
case *types.Slice:
|
||||
// Same as array.
|
||||
if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
|
||||
return true // %s matches []byte
|
||||
}
|
||||
if t == argPointer {
|
||||
return true // %p prints a slice's 0th element
|
||||
}
|
||||
// Recur: []int matches %d. But watch out for
|
||||
// type T []T
|
||||
// If the element is a pointer type (type T[]*T), it's handled fine by the Pointer case below.
|
||||
return matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress)
|
||||
|
||||
case *types.Pointer:
|
||||
// Ugly, but dealing with an edge case: a known pointer to an invalid type,
|
||||
// probably something from a failed import.
|
||||
if typ.Elem().String() == "invalid type" {
|
||||
if false {
|
||||
pass.Reportf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", analysisutil.Format(pass.Fset, arg))
|
||||
}
|
||||
return true // special case
|
||||
}
|
||||
// If it's actually a pointer with %p, it prints as one.
|
||||
if t == argPointer {
|
||||
return true
|
||||
}
|
||||
|
||||
under := typ.Elem().Underlying()
|
||||
switch under.(type) {
|
||||
case *types.Struct: // see below
|
||||
case *types.Array: // see below
|
||||
case *types.Slice: // see below
|
||||
case *types.Map: // see below
|
||||
default:
|
||||
// Check whether the rest can print pointers.
|
||||
return t&argPointer != 0
|
||||
}
|
||||
// If it's a top-level pointer to a struct, array, slice, or
|
||||
// map, that's equivalent in our analysis to whether we can
|
||||
// print the type being pointed to. Pointers in nested levels
|
||||
// are not supported to minimize fmt running into loops.
|
||||
if len(inProgress) > 1 {
|
||||
return false
|
||||
}
|
||||
return matchArgTypeInternal(pass, t, under, arg, inProgress)
|
||||
|
||||
case *types.Struct:
|
||||
return matchStructArgType(pass, t, typ, arg, inProgress)
|
||||
|
||||
case *types.Interface:
|
||||
// There's little we can do.
|
||||
// Whether any particular verb is valid depends on the argument.
|
||||
// The user may have reasonable prior knowledge of the contents of the interface.
|
||||
return true
|
||||
|
||||
case *types.Basic:
|
||||
switch typ.Kind() {
|
||||
case types.UntypedBool,
|
||||
types.Bool:
|
||||
return t&argBool != 0
|
||||
|
||||
case types.UntypedInt,
|
||||
types.Int,
|
||||
types.Int8,
|
||||
types.Int16,
|
||||
types.Int32,
|
||||
types.Int64,
|
||||
types.Uint,
|
||||
types.Uint8,
|
||||
types.Uint16,
|
||||
types.Uint32,
|
||||
types.Uint64,
|
||||
types.Uintptr:
|
||||
return t&argInt != 0
|
||||
|
||||
case types.UntypedFloat,
|
||||
types.Float32,
|
||||
types.Float64:
|
||||
return t&argFloat != 0
|
||||
|
||||
case types.UntypedComplex,
|
||||
types.Complex64,
|
||||
types.Complex128:
|
||||
return t&argComplex != 0
|
||||
|
||||
case types.UntypedString,
|
||||
types.String:
|
||||
return t&argString != 0
|
||||
|
||||
case types.UnsafePointer:
|
||||
return t&(argPointer|argInt) != 0
|
||||
|
||||
case types.UntypedRune:
|
||||
return t&(argInt|argRune) != 0
|
||||
|
||||
case types.UntypedNil:
|
||||
return false
|
||||
|
||||
case types.Invalid:
|
||||
if false {
|
||||
pass.Reportf(arg.Pos(), "printf argument %v has invalid or unknown type", analysisutil.Format(pass.Fset, arg))
|
||||
}
|
||||
return true // Probably a type check problem.
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func isConvertibleToString(pass *analysis.Pass, typ types.Type) bool {
|
||||
if bt, ok := typ.(*types.Basic); ok && bt.Kind() == types.UntypedNil {
|
||||
// We explicitly don't want untyped nil, which is
|
||||
// convertible to both of the interfaces below, as it
|
||||
// would just panic anyway.
|
||||
return false
|
||||
}
|
||||
if types.ConvertibleTo(typ, errorType) {
|
||||
return true // via .Error()
|
||||
}
|
||||
|
||||
// Does it implement fmt.Stringer?
|
||||
if obj, _, _ := types.LookupFieldOrMethod(typ, false, nil, "String"); obj != nil {
|
||||
if fn, ok := obj.(*types.Func); ok {
|
||||
sig := fn.Type().(*types.Signature)
|
||||
if sig.Params().Len() == 0 &&
|
||||
sig.Results().Len() == 1 &&
|
||||
sig.Results().At(0).Type() == types.Typ[types.String] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// hasBasicType reports whether x's type is a types.Basic with the given kind.
|
||||
func hasBasicType(pass *analysis.Pass, x ast.Expr, kind types.BasicKind) bool {
|
||||
t := pass.TypesInfo.Types[x].Type
|
||||
if t != nil {
|
||||
t = t.Underlying()
|
||||
}
|
||||
b, ok := t.(*types.Basic)
|
||||
return ok && b.Kind() == kind
|
||||
}
|
||||
|
||||
// matchStructArgType reports whether all the elements of the struct match the expected
|
||||
// type. For instance, with "%d" all the elements must be printable with the "%d" format.
|
||||
func matchStructArgType(pass *analysis.Pass, t printfArgType, typ *types.Struct, arg ast.Expr, inProgress map[types.Type]bool) bool {
|
||||
for i := 0; i < typ.NumFields(); i++ {
|
||||
typf := typ.Field(i)
|
||||
if !matchArgTypeInternal(pass, t, typf.Type(), arg, inProgress) {
|
||||
return false
|
||||
}
|
||||
if t&argString != 0 && !typf.Exported() && isConvertibleToString(pass, typf.Type()) {
|
||||
// Issue #17798: unexported Stringer or error cannot be properly formatted.
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
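For illustration only (not part of this commit): an argument whose type does not match its verb, which the printf analyzer's type matching above rejects. The exact diagnostic wording lives in printf.go (suppressed in this view), so the comment paraphrases it.

```go
package example

import "fmt"

func report(name string, count int) {
	// Reported: %d expects an integer argument, but name is a string.
	fmt.Printf("processed %d items for %d\n", count, name)

	// Matching the verbs to the argument types passes the check.
	fmt.Printf("processed %d items for %s\n", count, name)
}
```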
|
@ -2,8 +2,23 @@
|
|||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
This file contains the code to check for shadowed variables.
|
||||
package shadow
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||
"golang.org/x/tools/go/ast/inspector"
|
||||
)
|
||||
|
||||
// NOTE: Experimental. Not part of the vet suite.
|
||||
|
||||
const Doc = `check for possible unintended shadowing of variables
|
||||
|
||||
This analyzer check for shadowed variables.
|
||||
A shadowed variable is a variable declared in an inner scope
|
||||
with the same name and type as a variable in an outer scope,
|
||||
and where the outer variable is mentioned after the inner one
|
||||
|
@ -25,41 +40,70 @@ For example:
|
|||
}
|
||||
return err
|
||||
}
|
||||
`
|
||||
|
||||
*/
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "shadow",
|
||||
Doc: Doc,
|
||||
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||
Run: run,
|
||||
}
|
||||
|
||||
package govet
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
)
|
||||
|
||||
var strictShadowing = flag.Bool("shadowstrict", false, "whether to be strict about shadowing; can be noisy")
|
||||
// flags
|
||||
var strict = false
|
||||
|
||||
func init() {
|
||||
register("shadow",
|
||||
"check for shadowed variables (experimental; must be set explicitly)",
|
||||
checkShadow,
|
||||
assignStmt, genDecl)
|
||||
experimental["shadow"] = true
|
||||
Analyzer.Flags.BoolVar(&strict, "strict", strict, "whether to be strict about shadowing; can be noisy")
|
||||
}
|
||||
|
||||
func checkShadow(f *File, node ast.Node) {
|
||||
switch n := node.(type) {
|
||||
case *ast.AssignStmt:
|
||||
checkShadowAssignment(f, n)
|
||||
case *ast.GenDecl:
|
||||
checkShadowDecl(f, n)
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
|
||||
spans := make(map[types.Object]span)
|
||||
for id, obj := range pass.TypesInfo.Defs {
|
||||
// Ignore identifiers that don't denote objects
|
||||
// (package names, symbolic variables such as t
|
||||
// in t := x.(type) of type switch headers).
|
||||
if obj != nil {
|
||||
growSpan(spans, obj, id.Pos(), id.End())
|
||||
}
|
||||
}
|
||||
for id, obj := range pass.TypesInfo.Uses {
|
||||
growSpan(spans, obj, id.Pos(), id.End())
|
||||
}
|
||||
for node, obj := range pass.TypesInfo.Implicits {
|
||||
// A type switch with a short variable declaration
|
||||
// such as t := x.(type) doesn't declare the symbolic
|
||||
// variable (t in the example) at the switch header;
|
||||
// instead a new variable t (with specific type) is
|
||||
// declared implicitly for each case. Such variables
|
||||
// are found in the types.Info.Implicits (not Defs)
|
||||
// map. Add them here, assuming they are declared at
|
||||
// the type cases' colon ":".
|
||||
if cc, ok := node.(*ast.CaseClause); ok {
|
||||
growSpan(spans, obj, cc.Colon, cc.Colon)
|
||||
}
|
||||
}
|
||||
|
||||
nodeFilter := []ast.Node{
|
||||
(*ast.AssignStmt)(nil),
|
||||
(*ast.GenDecl)(nil),
|
||||
}
|
||||
inspect.Preorder(nodeFilter, func(n ast.Node) {
|
||||
switch n := n.(type) {
|
||||
case *ast.AssignStmt:
|
||||
checkShadowAssignment(pass, spans, n)
|
||||
case *ast.GenDecl:
|
||||
checkShadowDecl(pass, spans, n)
|
||||
}
|
||||
})
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// Span stores the minimum range of byte positions in the file in which a
|
||||
// A span stores the minimum range of byte positions in the file in which a
|
||||
// given variable (types.Object) is mentioned. It is lexically defined: it spans
|
||||
// from the beginning of its first mention to the end of its last mention.
|
||||
// A variable is considered shadowed (if *strictShadowing is off) only if the
|
||||
// A variable is considered shadowed (if strict is off) only if the
|
||||
// shadowing variable is declared within the span of the shadowed variable.
|
||||
// In other words, if a variable is shadowed but not used after the shadowed
|
||||
// variable is declared, it is inconsequential and not worth complaining about.
|
||||
|
@ -76,59 +120,56 @@ func checkShadow(f *File, node ast.Node) {
|
|||
// - A variable declared inside a function literal can falsely be identified
|
||||
// as shadowing a variable in the outer function.
|
||||
//
|
||||
type Span struct {
|
||||
type span struct {
|
||||
min token.Pos
|
||||
max token.Pos
|
||||
}
|
||||
|
||||
// contains reports whether the position is inside the span.
|
||||
func (s Span) contains(pos token.Pos) bool {
|
||||
func (s span) contains(pos token.Pos) bool {
|
||||
return s.min <= pos && pos < s.max
|
||||
}
|
||||
|
||||
// growSpan expands the span for the object to contain the instance represented
|
||||
// by the identifier.
|
||||
func (pkg *Package) growSpan(ident *ast.Ident, obj types.Object) {
|
||||
if *strictShadowing {
|
||||
// growSpan expands the span for the object to contain the source range [pos, end).
|
||||
func growSpan(spans map[types.Object]span, obj types.Object, pos, end token.Pos) {
|
||||
if strict {
|
||||
return // No need
|
||||
}
|
||||
pos := ident.Pos()
|
||||
end := ident.End()
|
||||
span, ok := pkg.spans[obj]
|
||||
s, ok := spans[obj]
|
||||
if ok {
|
||||
if span.min > pos {
|
||||
span.min = pos
|
||||
if s.min > pos {
|
||||
s.min = pos
|
||||
}
|
||||
if span.max < end {
|
||||
span.max = end
|
||||
if s.max < end {
|
||||
s.max = end
|
||||
}
|
||||
} else {
|
||||
span = Span{pos, end}
|
||||
s = span{pos, end}
|
||||
}
|
||||
pkg.spans[obj] = span
|
||||
spans[obj] = s
|
||||
}
|
||||
|
||||
// checkShadowAssignment checks for shadowing in a short variable declaration.
|
||||
func checkShadowAssignment(f *File, a *ast.AssignStmt) {
|
||||
func checkShadowAssignment(pass *analysis.Pass, spans map[types.Object]span, a *ast.AssignStmt) {
|
||||
if a.Tok != token.DEFINE {
|
||||
return
|
||||
}
|
||||
if f.idiomaticShortRedecl(a) {
|
||||
if idiomaticShortRedecl(pass, a) {
|
||||
return
|
||||
}
|
||||
for _, expr := range a.Lhs {
|
||||
ident, ok := expr.(*ast.Ident)
|
||||
if !ok {
|
||||
f.Badf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
|
||||
pass.Reportf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
|
||||
return
|
||||
}
|
||||
checkShadowing(f, ident)
|
||||
checkShadowing(pass, spans, ident)
|
||||
}
|
||||
}
|
||||
|
||||
// idiomaticShortRedecl reports whether this short declaration can be ignored for
|
||||
// the purposes of shadowing, that is, that any redeclarations it contains are deliberate.
|
||||
func (f *File) idiomaticShortRedecl(a *ast.AssignStmt) bool {
|
||||
func idiomaticShortRedecl(pass *analysis.Pass, a *ast.AssignStmt) bool {
|
||||
// Don't complain about deliberate redeclarations of the form
|
||||
// i := i
|
||||
// Such constructs are idiomatic in range loops to create a new variable
|
||||
|
@ -141,7 +182,7 @@ func (f *File) idiomaticShortRedecl(a *ast.AssignStmt) bool {
|
|||
for i, expr := range a.Lhs {
|
||||
lhs, ok := expr.(*ast.Ident)
|
||||
if !ok {
|
||||
f.Badf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
|
||||
pass.Reportf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
|
||||
return true // Don't do any more processing.
|
||||
}
|
||||
switch rhs := a.Rhs[i].(type) {
|
||||
|
@ -164,7 +205,7 @@ func (f *File) idiomaticShortRedecl(a *ast.AssignStmt) bool {
|
|||
|
||||
// idiomaticRedecl reports whether this declaration spec can be ignored for
|
||||
// the purposes of shadowing, that is, that any redeclarations it contains are deliberate.
|
||||
func (f *File) idiomaticRedecl(d *ast.ValueSpec) bool {
|
||||
func idiomaticRedecl(d *ast.ValueSpec) bool {
|
||||
// Don't complain about deliberate redeclarations of the form
|
||||
// var i, j = i, j
|
||||
if len(d.Names) != len(d.Values) {
|
||||
|
@ -181,34 +222,34 @@ func (f *File) idiomaticRedecl(d *ast.ValueSpec) bool {
|
|||
}
|
||||
|
||||
// checkShadowDecl checks for shadowing in a general variable declaration.
|
||||
func checkShadowDecl(f *File, d *ast.GenDecl) {
|
||||
func checkShadowDecl(pass *analysis.Pass, spans map[types.Object]span, d *ast.GenDecl) {
|
||||
if d.Tok != token.VAR {
|
||||
return
|
||||
}
|
||||
for _, spec := range d.Specs {
|
||||
valueSpec, ok := spec.(*ast.ValueSpec)
|
||||
if !ok {
|
||||
f.Badf(spec.Pos(), "invalid AST: var GenDecl not ValueSpec")
|
||||
pass.Reportf(spec.Pos(), "invalid AST: var GenDecl not ValueSpec")
|
||||
return
|
||||
}
|
||||
// Don't complain about deliberate redeclarations of the form
|
||||
// var i = i
|
||||
if f.idiomaticRedecl(valueSpec) {
|
||||
if idiomaticRedecl(valueSpec) {
|
||||
return
|
||||
}
|
||||
for _, ident := range valueSpec.Names {
|
||||
checkShadowing(f, ident)
|
||||
checkShadowing(pass, spans, ident)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkShadowing checks whether the identifier shadows an identifier in an outer scope.
|
||||
func checkShadowing(f *File, ident *ast.Ident) {
|
||||
func checkShadowing(pass *analysis.Pass, spans map[types.Object]span, ident *ast.Ident) {
|
||||
if ident.Name == "_" {
|
||||
// Can't shadow the blank identifier.
|
||||
return
|
||||
}
|
||||
obj := f.pkg.defs[ident]
|
||||
obj := pass.TypesInfo.Defs[ident]
|
||||
if obj == nil {
|
||||
return
|
||||
}
|
||||
|
@ -222,7 +263,7 @@ func checkShadowing(f *File, ident *ast.Ident) {
|
|||
if shadowed.Parent() == types.Universe {
|
||||
return
|
||||
}
|
||||
if *strictShadowing {
|
||||
if strict {
|
||||
// The shadowed identifier must appear before this one to be an instance of shadowing.
|
||||
if shadowed.Pos() > ident.Pos() {
|
||||
return
|
||||
|
@ -230,9 +271,9 @@ func checkShadowing(f *File, ident *ast.Ident) {
|
|||
} else {
|
||||
// Don't complain if the span of validity of the shadowed identifier doesn't include
|
||||
// the shadowing identifier.
|
||||
span, ok := f.pkg.spans[shadowed]
|
||||
span, ok := spans[shadowed]
|
||||
if !ok {
|
||||
f.Badf(ident.Pos(), "internal error: no range for %q", ident.Name)
|
||||
pass.Reportf(ident.Pos(), "internal error: no range for %q", ident.Name)
|
||||
return
|
||||
}
|
||||
if !span.contains(ident.Pos()) {
|
||||
|
@ -241,6 +282,7 @@ func checkShadowing(f *File, ident *ast.Ident) {
|
|||
}
|
||||
// Don't complain if the types differ: that implies the programmer really wants two different things.
|
||||
if types.Identical(obj.Type(), shadowed.Type()) {
|
||||
f.Badf(ident.Pos(), "declaration of %q shadows declaration at %s", obj.Name(), f.loc(shadowed.Pos()))
|
||||
line := pass.Fset.Position(shadowed.Pos()).Line
|
||||
pass.Reportf(ident.Pos(), "declaration of %q shadows declaration at line %d", obj.Name(), line)
|
||||
}
|
||||
}
|
|
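For illustration only (not part of this commit): a shadowed error variable of the kind the shadow analyzer reports when check-shadowing is enabled. The outer err is read after the inner := declaration, so the inner assignment is silently lost; names are made up and the comment paraphrases the Reportf message above.

```go
package example

import "os"

// shadowed is reported: the inner err declared with := has the same name and
// type as the outer err, and the outer err is still mentioned afterwards, so
// the os.Chmod failure never reaches the caller.
func shadowed(path string) error {
	var err error
	if _, statErr := os.Stat(path); statErr == nil {
		err := os.Chmod(path, 0600) // reported: declaration of "err" shadows declaration above
		_ = err
	}
	return err // always nil here
}

// fixed assigns to the existing err instead of declaring a new one.
func fixed(path string) error {
	var err error
	if _, statErr := os.Stat(path); statErr == nil {
		err = os.Chmod(path, 0600)
	}
	return err
}
```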
@ -1,37 +1,50 @@
|
|||
// Copyright 2017 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
//
|
||||
// Simplified dead code detector. Used for skipping certain checks
|
||||
// on unreachable code (for instance, shift checks on arch-specific code).
|
||||
|
||||
package govet
|
||||
package shift
|
||||
|
||||
// Simplified dead code detector.
|
||||
// Used for skipping shift checks on unreachable arch-specific code.
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/constant"
|
||||
"go/types"
|
||||
)
|
||||
|
||||
// updateDead puts unreachable "if" and "case" nodes into f.dead.
|
||||
func (f *File) updateDead(node ast.Node) {
|
||||
if f.dead[node] {
|
||||
// updateDead puts unreachable "if" and "case" nodes into dead.
|
||||
func updateDead(info *types.Info, dead map[ast.Node]bool, node ast.Node) {
|
||||
if dead[node] {
|
||||
// The node is already marked as dead.
|
||||
return
|
||||
}
|
||||
|
||||
// setDead marks the node and all the children as dead.
|
||||
setDead := func(n ast.Node) {
|
||||
ast.Inspect(n, func(node ast.Node) bool {
|
||||
if node != nil {
|
||||
dead[node] = true
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
switch stmt := node.(type) {
|
||||
case *ast.IfStmt:
|
||||
// "if" branch is dead if its condition evaluates
|
||||
// to constant false.
|
||||
v := f.pkg.types[stmt.Cond].Value
|
||||
v := info.Types[stmt.Cond].Value
|
||||
if v == nil {
|
||||
return
|
||||
}
|
||||
if !constant.BoolVal(v) {
|
||||
f.setDead(stmt.Body)
|
||||
setDead(stmt.Body)
|
||||
return
|
||||
}
|
||||
f.setDead(stmt.Else)
|
||||
if stmt.Else != nil {
|
||||
setDead(stmt.Else)
|
||||
}
|
||||
case *ast.SwitchStmt:
|
||||
// Case clause with empty switch tag is dead if it evaluates
|
||||
// to constant false.
|
||||
|
@ -44,12 +57,12 @@ func (f *File) updateDead(node ast.Node) {
|
|||
continue
|
||||
}
|
||||
for _, expr := range cc.List {
|
||||
v := f.pkg.types[expr].Value
|
||||
v := info.Types[expr].Value
|
||||
if v == nil || v.Kind() != constant.Bool || constant.BoolVal(v) {
|
||||
continue BodyLoopBool
|
||||
}
|
||||
}
|
||||
f.setDead(cc)
|
||||
setDead(cc)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
@ -57,7 +70,7 @@ func (f *File) updateDead(node ast.Node) {
|
|||
// Case clause is dead if its constant value doesn't match
|
||||
// the constant value from the switch tag.
|
||||
// TODO: This handles integer comparisons only.
|
||||
v := f.pkg.types[stmt.Tag].Value
|
||||
v := info.Types[stmt.Tag].Value
|
||||
if v == nil || v.Kind() != constant.Int {
|
||||
return
|
||||
}
|
||||
|
@ -73,7 +86,7 @@ func (f *File) updateDead(node ast.Node) {
|
|||
continue
|
||||
}
|
||||
for _, expr := range cc.List {
|
||||
v := f.pkg.types[expr].Value
|
||||
v := info.Types[expr].Value
|
||||
if v == nil {
|
||||
continue BodyLoopInt
|
||||
}
|
||||
|
@ -82,27 +95,7 @@ func (f *File) updateDead(node ast.Node) {
|
|||
continue BodyLoopInt
|
||||
}
|
||||
}
|
||||
f.setDead(cc)
|
||||
setDead(cc)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// setDead marks the node and all the children as dead.
|
||||
func (f *File) setDead(node ast.Node) {
|
||||
dv := deadVisitor{
|
||||
f: f,
|
||||
}
|
||||
ast.Walk(dv, node)
|
||||
}
|
||||
|
||||
type deadVisitor struct {
|
||||
f *File
|
||||
}
|
||||
|
||||
func (dv deadVisitor) Visit(node ast.Node) ast.Visitor {
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
dv.f.dead[node] = true
|
||||
return dv
|
||||
}
|
vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go (generated, vendored, new file, 99 lines)
@@ -0,0 +1,99 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package shift defines an Analyzer that checks for shifts that exceed
// the width of an integer.
package shift

// TODO(adonovan): integrate with ctrflow (CFG-based) dead code analysis. May
// have impedance mismatch due to its (non-)treatment of constant
// expressions (such as runtime.GOARCH=="386").

import (
	"go/ast"
	"go/constant"
	"go/token"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
	"golang.org/x/tools/go/ast/inspector"
)

var Analyzer = &analysis.Analyzer{
	Name:     "shift",
	Doc:      "check for shifts that equal or exceed the width of the integer",
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	// Do a complete pass to compute dead nodes.
	dead := make(map[ast.Node]bool)
	nodeFilter := []ast.Node{
		(*ast.IfStmt)(nil),
		(*ast.SwitchStmt)(nil),
	}
	inspect.Preorder(nodeFilter, func(n ast.Node) {
		// TODO(adonovan): move updateDead into this file.
		updateDead(pass.TypesInfo, dead, n)
	})

	nodeFilter = []ast.Node{
		(*ast.AssignStmt)(nil),
		(*ast.BinaryExpr)(nil),
	}
	inspect.Preorder(nodeFilter, func(node ast.Node) {
		if dead[node] {
			// Skip shift checks on unreachable nodes.
			return
		}

		switch node := node.(type) {
		case *ast.BinaryExpr:
			if node.Op == token.SHL || node.Op == token.SHR {
				checkLongShift(pass, node, node.X, node.Y)
			}
		case *ast.AssignStmt:
			if len(node.Lhs) != 1 || len(node.Rhs) != 1 {
				return
			}
			if node.Tok == token.SHL_ASSIGN || node.Tok == token.SHR_ASSIGN {
				checkLongShift(pass, node, node.Lhs[0], node.Rhs[0])
			}
		}
	})
	return nil, nil
}

// checkLongShift checks if shift or shift-assign operations shift by more than
// the length of the underlying variable.
func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) {
	if pass.TypesInfo.Types[x].Value != nil {
		// Ignore shifts of constants.
		// These are frequently used for bit-twiddling tricks
		// like ^uint(0) >> 63 for 32/64 bit detection and compatibility.
		return
	}

	v := pass.TypesInfo.Types[y].Value
	if v == nil {
		return
	}
	amt, ok := constant.Int64Val(v)
	if !ok {
		return
	}
	t := pass.TypesInfo.Types[x].Type
	if t == nil {
		return
	}
	size := 8 * pass.TypesSizes.Sizeof(t)
	if amt >= size {
		ident := analysisutil.Format(pass.Fset, x)
		pass.Reportf(node.Pos(), "%s (%d bits) too small for shift of %d", ident, size, amt)
	}
}
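For illustration only (not part of this commit): a shift the analyzer reports because the amount equals the width of the operand, next to the constant-shift idiom it deliberately ignores.

```go
package example

// highBits is reported: b is 8 bits wide, so shifting it right by 8 always
// yields zero ("b (8 bits) too small for shift of 8").
func highBits(b byte) byte {
	return b >> 8
}

// Shifts of constants are ignored; this common 32/64-bit detection idiom
// is not reported.
const uintIs64Bit = ^uint(0)>>63 == 1
```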
|
@ -2,27 +2,47 @@
|
|||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains the code to check canonical methods.
|
||||
|
||||
package govet
|
||||
// Package stdmethods defines an Analyzer that checks for misspellings
|
||||
// in the signatures of methods similar to well-known interfaces.
|
||||
package stdmethods
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/printer"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||
"golang.org/x/tools/go/ast/inspector"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("methods",
|
||||
"check that canonically named methods are canonically defined",
|
||||
checkCanonicalMethod,
|
||||
funcDecl, interfaceType)
|
||||
}
|
||||
const Doc = `check signature of methods of well-known interfaces
|
||||
|
||||
type MethodSig struct {
|
||||
args []string
|
||||
results []string
|
||||
Sometimes a type may be intended to satisfy an interface but may fail to
|
||||
do so because of a mistake in its method signature.
|
||||
For example, the result of this WriteTo method should be (int64, error),
|
||||
not error, to satisfy io.WriterTo:
|
||||
|
||||
type myWriterTo struct{...}
|
||||
func (myWriterTo) WriteTo(w io.Writer) error { ... }
|
||||
|
||||
This check ensures that each method whose name matches one of several
|
||||
well-known interface methods from the standard library has the correct
|
||||
signature for that interface.
|
||||
|
||||
Checked method names include:
|
||||
Format GobEncode GobDecode MarshalJSON MarshalXML
|
||||
Peek ReadByte ReadFrom ReadRune Scan Seek
|
||||
UnmarshalJSON UnreadByte UnreadRune WriteByte
|
||||
WriteTo
|
||||
`
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "stdmethods",
|
||||
Doc: Doc,
|
||||
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||
Run: run,
|
||||
}
|
||||
|
||||
// canonicalMethods lists the input and output types for Go methods
|
||||
|
@ -41,7 +61,7 @@ type MethodSig struct {
|
|||
// method doesn't have a fmt.ScanState as its first argument,
|
||||
// we let it go. But if it does have a fmt.ScanState, then the
|
||||
// rest has to match.
|
||||
var canonicalMethods = map[string]MethodSig{
|
||||
var canonicalMethods = map[string]struct{ args, results []string }{
|
||||
// "Flush": {{}, {"error"}}, // http.Flusher and jpeg.writer conflict
|
||||
"Format": {[]string{"=fmt.State", "rune"}, []string{}}, // fmt.Formatter
|
||||
"GobDecode": {[]string{"[]byte"}, []string{"error"}}, // gob.GobDecoder
|
||||
|
@ -61,22 +81,31 @@ var canonicalMethods = map[string]MethodSig{
|
|||
"WriteTo": {[]string{"=io.Writer"}, []string{"int64", "error"}}, // io.WriterTo
|
||||
}
|
||||
|
||||
func checkCanonicalMethod(f *File, node ast.Node) {
|
||||
switch n := node.(type) {
|
||||
case *ast.FuncDecl:
|
||||
if n.Recv != nil {
|
||||
canonicalMethod(f, n.Name, n.Type)
|
||||
}
|
||||
case *ast.InterfaceType:
|
||||
for _, field := range n.Methods.List {
|
||||
for _, id := range field.Names {
|
||||
canonicalMethod(f, id, field.Type.(*ast.FuncType))
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
|
||||
nodeFilter := []ast.Node{
|
||||
(*ast.FuncDecl)(nil),
|
||||
(*ast.InterfaceType)(nil),
|
||||
}
|
||||
inspect.Preorder(nodeFilter, func(n ast.Node) {
|
||||
switch n := n.(type) {
|
||||
case *ast.FuncDecl:
|
||||
if n.Recv != nil {
|
||||
canonicalMethod(pass, n.Name)
|
||||
}
|
||||
case *ast.InterfaceType:
|
||||
for _, field := range n.Methods.List {
|
||||
for _, id := range field.Names {
|
||||
canonicalMethod(pass, id)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func canonicalMethod(f *File, id *ast.Ident, t *ast.FuncType) {
|
||||
func canonicalMethod(pass *analysis.Pass, id *ast.Ident) {
|
||||
// Expected input/output.
|
||||
expect, ok := canonicalMethods[id.Name]
|
||||
if !ok {
|
||||
|
@ -84,19 +113,17 @@ func canonicalMethod(f *File, id *ast.Ident, t *ast.FuncType) {
|
|||
}
|
||||
|
||||
// Actual input/output
|
||||
args := typeFlatten(t.Params.List)
|
||||
var results []ast.Expr
|
||||
if t.Results != nil {
|
||||
results = typeFlatten(t.Results.List)
|
||||
}
|
||||
sign := pass.TypesInfo.Defs[id].Type().(*types.Signature)
|
||||
args := sign.Params()
|
||||
results := sign.Results()
|
||||
|
||||
// Do the =s (if any) all match?
|
||||
if !f.matchParams(expect.args, args, "=") || !f.matchParams(expect.results, results, "=") {
|
||||
if !matchParams(pass, expect.args, args, "=") || !matchParams(pass, expect.results, results, "=") {
|
||||
return
|
||||
}
|
||||
|
||||
// Everything must match.
|
||||
if !f.matchParams(expect.args, args, "") || !f.matchParams(expect.results, results, "") {
|
||||
if !matchParams(pass, expect.args, args, "") || !matchParams(pass, expect.results, results, "") {
|
||||
expectFmt := id.Name + "(" + argjoin(expect.args) + ")"
|
||||
if len(expect.results) == 1 {
|
||||
expectFmt += " " + argjoin(expect.results)
|
||||
|
@ -104,18 +131,18 @@ func canonicalMethod(f *File, id *ast.Ident, t *ast.FuncType) {
|
|||
expectFmt += " (" + argjoin(expect.results) + ")"
|
||||
}
|
||||
|
||||
f.b.Reset()
|
||||
if err := printer.Fprint(&f.b, f.fset, t); err != nil {
|
||||
fmt.Fprintf(&f.b, "<%s>", err)
|
||||
}
|
||||
actual := f.b.String()
|
||||
actual := typeString(sign)
|
||||
actual = strings.TrimPrefix(actual, "func")
|
||||
actual = id.Name + actual
|
||||
|
||||
f.Badf(id.Pos(), "method %s should have signature %s", actual, expectFmt)
|
||||
pass.Reportf(id.Pos(), "method %s should have signature %s", actual, expectFmt)
|
||||
}
|
||||
}
|
||||
|
||||
func typeString(typ types.Type) string {
|
||||
return types.TypeString(typ, (*types.Package).Name)
|
||||
}
|
||||
|
||||
func argjoin(x []string) string {
|
||||
y := make([]string, len(x))
|
||||
for i, s := range x {
|
||||
|
@ -127,53 +154,33 @@ func argjoin(x []string) string {
|
|||
return strings.Join(y, ", ")
|
||||
}
|
||||
|
||||
// Turn parameter list into slice of types
|
||||
// (in the ast, types are Exprs).
|
||||
// Have to handle f(int, bool) and f(x, y, z int)
|
||||
// so not a simple 1-to-1 conversion.
|
||||
func typeFlatten(l []*ast.Field) []ast.Expr {
|
||||
var t []ast.Expr
|
||||
for _, f := range l {
|
||||
if len(f.Names) == 0 {
|
||||
t = append(t, f.Type)
|
||||
continue
|
||||
}
|
||||
for range f.Names {
|
||||
t = append(t, f.Type)
|
||||
}
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
// Does each type in expect with the given prefix match the corresponding type in actual?
|
||||
func (f *File) matchParams(expect []string, actual []ast.Expr, prefix string) bool {
|
||||
func matchParams(pass *analysis.Pass, expect []string, actual *types.Tuple, prefix string) bool {
|
||||
for i, x := range expect {
|
||||
if !strings.HasPrefix(x, prefix) {
|
||||
continue
|
||||
}
|
||||
if i >= len(actual) {
|
||||
if i >= actual.Len() {
|
||||
return false
|
||||
}
|
||||
if !f.matchParamType(x, actual[i]) {
|
||||
if !matchParamType(pass.Fset, pass.Pkg, x, actual.At(i).Type()) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if prefix == "" && len(actual) > len(expect) {
|
||||
if prefix == "" && actual.Len() > len(expect) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Does this one type match?
|
||||
func (f *File) matchParamType(expect string, actual ast.Expr) bool {
|
||||
func matchParamType(fset *token.FileSet, pkg *types.Package, expect string, actual types.Type) bool {
|
||||
expect = strings.TrimPrefix(expect, "=")
|
||||
// Strip package name if we're in that package.
|
||||
if n := len(f.file.Name.Name); len(expect) > n && expect[:n] == f.file.Name.Name && expect[n] == '.' {
|
||||
if n := len(pkg.Name()); len(expect) > n && expect[:n] == pkg.Name() && expect[n] == '.' {
|
||||
expect = expect[n+1:]
|
||||
}
|
||||
|
||||
// Overkill but easy.
|
||||
f.b.Reset()
|
||||
printer.Fprint(&f.b, f.fset, actual)
|
||||
return f.b.String() == expect
|
||||
return typeString(actual) == expect
|
||||
}
|
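For illustration only (not part of this commit): a method that stdmethods reports because its name matches io.WriterTo but its signature does not, next to the canonical form. Type names are made up; the comment paraphrases the Reportf message above.

```go
package example

import "io"

type broken struct{ data []byte }

// Reported: method WriteTo(w io.Writer) error should have signature
// WriteTo(io.Writer) (int64, error); as written, broken does not satisfy
// io.WriterTo even though it looks like it should.
func (b broken) WriteTo(w io.Writer) error {
	_, err := w.Write(b.data)
	return err
}

type canonical struct{ data []byte }

func (c canonical) WriteTo(w io.Writer) (int64, error) {
	n, err := w.Write(c.data)
	return int64(n), err
}

var _ io.WriterTo = canonical{}
```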
vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go (generated, vendored, new file, 273 lines)
@@ -0,0 +1,273 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package structtag defines an Analyzer that checks struct field tags
|
||||
// are well formed.
|
||||
package structtag
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/passes/inspect"
|
||||
"golang.org/x/tools/go/ast/inspector"
|
||||
)
|
||||
|
||||
const Doc = `check that struct field tags conform to reflect.StructTag.Get
|
||||
|
||||
Also report certain struct tags (json, xml) used with unexported fields.`
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "structtag",
|
||||
Doc: Doc,
|
||||
Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||
RunDespiteErrors: true,
|
||||
Run: run,
|
||||
}
|
||||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
|
||||
nodeFilter := []ast.Node{
|
||||
(*ast.StructType)(nil),
|
||||
}
|
||||
inspect.Preorder(nodeFilter, func(n ast.Node) {
|
||||
styp := pass.TypesInfo.Types[n.(*ast.StructType)].Type.(*types.Struct)
|
||||
var seen map[[2]string]token.Pos
|
||||
for i := 0; i < styp.NumFields(); i++ {
|
||||
field := styp.Field(i)
|
||||
tag := styp.Tag(i)
|
||||
checkCanonicalFieldTag(pass, field, tag, &seen)
|
||||
}
|
||||
})
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var checkTagDups = []string{"json", "xml"}
|
||||
var checkTagSpaces = map[string]bool{"json": true, "xml": true, "asn1": true}
|
||||
|
||||
// checkCanonicalFieldTag checks a single struct field tag.
|
||||
func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, seen *map[[2]string]token.Pos) {
|
||||
for _, key := range checkTagDups {
|
||||
checkTagDuplicates(pass, tag, key, field, field, seen)
|
||||
}
|
||||
|
||||
if err := validateStructTag(tag); err != nil {
|
||||
pass.Reportf(field.Pos(), "struct field tag %#q not compatible with reflect.StructTag.Get: %s", tag, err)
|
||||
}
|
||||
|
||||
// Check for use of json or xml tags with unexported fields.
|
||||
|
||||
// Embedded struct. Nothing to do for now, but that
|
||||
// may change, depending on what happens with issue 7363.
|
||||
// TODO(adonovan): investigate, now that that issue is fixed.
|
||||
if field.Anonymous() {
|
||||
return
|
||||
}
|
||||
|
||||
if field.Exported() {
|
||||
return
|
||||
}
|
||||
|
||||
for _, enc := range [...]string{"json", "xml"} {
|
||||
if reflect.StructTag(tag).Get(enc) != "" {
|
||||
pass.Reportf(field.Pos(), "struct field %s has %s tag but is not exported", field.Name(), enc)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkTagDuplicates checks a single struct field tag to see if any tags are
|
||||
// duplicated. nearest is the field that's closest to the field being checked,
|
||||
// while still being part of the top-level struct type.
|
||||
func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *types.Var, seen *map[[2]string]token.Pos) {
|
||||
val := reflect.StructTag(tag).Get(key)
|
||||
if val == "-" {
|
||||
// Ignored, even if the field is anonymous.
|
||||
return
|
||||
}
|
||||
if val == "" || val[0] == ',' {
|
||||
if field.Anonymous() {
|
||||
typ, ok := field.Type().Underlying().(*types.Struct)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
for i := 0; i < typ.NumFields(); i++ {
|
||||
field := typ.Field(i)
|
||||
if !field.Exported() {
|
||||
continue
|
||||
}
|
||||
tag := typ.Tag(i)
|
||||
checkTagDuplicates(pass, tag, key, nearest, field, seen)
|
||||
}
|
||||
}
|
||||
// Ignored if the field isn't anonymous.
|
||||
return
|
||||
}
|
||||
if key == "xml" && field.Name() == "XMLName" {
|
||||
// XMLName defines the XML element name of the struct being
|
||||
// checked. That name cannot collide with element or attribute
|
||||
// names defined on other fields of the struct. Vet does not have a
|
||||
// check for untagged fields of type struct defining their own name
|
||||
// by containing a field named XMLName; see issue 18256.
|
||||
return
|
||||
}
|
||||
if i := strings.Index(val, ","); i >= 0 {
|
||||
if key == "xml" {
|
||||
// Use a separate namespace for XML attributes.
|
||||
for _, opt := range strings.Split(val[i:], ",") {
|
||||
if opt == "attr" {
|
||||
key += " attribute" // Key is part of the error message.
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
val = val[:i]
|
||||
}
|
||||
if *seen == nil {
|
||||
*seen = map[[2]string]token.Pos{}
|
||||
}
|
||||
if pos, ok := (*seen)[[2]string{key, val}]; ok {
|
||||
alsoPos := pass.Fset.Position(pos)
|
||||
alsoPos.Column = 0
|
||||
|
||||
// Make the "also at" position relative to the current position,
|
||||
// to ensure that all warnings are unambiguous and correct. For
|
||||
// example, via anonymous struct fields, it's possible for the
|
||||
// two fields to be in different packages and directories.
|
||||
thisPos := pass.Fset.Position(field.Pos())
|
||||
rel, err := filepath.Rel(filepath.Dir(thisPos.Filename), alsoPos.Filename)
|
||||
if err != nil {
|
||||
// Possibly because the paths are relative; leave the
|
||||
// filename alone.
|
||||
} else {
|
||||
alsoPos.Filename = rel
|
||||
}
|
||||
|
||||
pass.Reportf(nearest.Pos(), "struct field %s repeats %s tag %q also at %s", field.Name(), key, val, alsoPos)
|
||||
} else {
|
||||
(*seen)[[2]string{key, val}] = field.Pos()
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
errTagSyntax = errors.New("bad syntax for struct tag pair")
|
||||
errTagKeySyntax = errors.New("bad syntax for struct tag key")
|
||||
errTagValueSyntax = errors.New("bad syntax for struct tag value")
|
||||
errTagValueSpace = errors.New("suspicious space in struct tag value")
|
||||
errTagSpace = errors.New("key:\"value\" pairs not separated by spaces")
|
||||
)
|
||||
|
||||
// validateStructTag parses the struct tag and returns an error if it is not
|
||||
// in the canonical format, which is a space-separated list of key:"value"
|
||||
// settings. The value may contain spaces.
|
||||
func validateStructTag(tag string) error {
|
||||
// This code is based on the StructTag.Get code in package reflect.
|
||||
|
||||
n := 0
|
||||
for ; tag != ""; n++ {
|
||||
if n > 0 && tag != "" && tag[0] != ' ' {
|
||||
// More restrictive than reflect, but catches likely mistakes
|
||||
// like `x:"foo",y:"bar"`, which parses as `x:"foo" ,y:"bar"` with second key ",y".
|
||||
return errTagSpace
|
||||
}
|
||||
// Skip leading space.
|
||||
i := 0
|
||||
for i < len(tag) && tag[i] == ' ' {
|
||||
i++
|
||||
}
|
||||
tag = tag[i:]
|
||||
if tag == "" {
|
||||
break
|
||||
}
|
||||
|
||||
// Scan to colon. A space, a quote or a control character is a syntax error.
|
||||
// Strictly speaking, control chars include the range [0x7f, 0x9f], not just
|
||||
// [0x00, 0x1f], but in practice, we ignore the multi-byte control characters
|
||||
// as it is simpler to inspect the tag's bytes than the tag's runes.
|
||||
i = 0
|
||||
for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f {
|
||||
i++
|
||||
}
|
||||
if i == 0 {
|
||||
return errTagKeySyntax
|
||||
}
|
||||
if i+1 >= len(tag) || tag[i] != ':' {
|
||||
return errTagSyntax
|
||||
}
|
||||
if tag[i+1] != '"' {
|
||||
return errTagValueSyntax
|
||||
}
|
||||
key := tag[:i]
|
||||
tag = tag[i+1:]
|
||||
|
||||
// Scan quoted string to find value.
|
||||
i = 1
|
||||
for i < len(tag) && tag[i] != '"' {
|
||||
if tag[i] == '\\' {
|
||||
i++
|
||||
}
|
||||
i++
|
||||
}
|
||||
if i >= len(tag) {
|
||||
return errTagValueSyntax
|
||||
}
|
||||
qvalue := tag[:i+1]
|
||||
tag = tag[i+1:]
|
||||
|
||||
value, err := strconv.Unquote(qvalue)
|
||||
if err != nil {
|
||||
return errTagValueSyntax
|
||||
}
|
||||
|
||||
if !checkTagSpaces[key] {
|
||||
continue
|
||||
}
|
||||
|
||||
switch key {
|
||||
case "xml":
|
||||
// If the first or last character in the XML tag is a space, it is
|
||||
// suspicious.
|
||||
if strings.Trim(value, " ") != value {
|
||||
return errTagValueSpace
|
||||
}
|
||||
|
||||
// If there are multiple spaces, they are suspicious.
|
||||
if strings.Count(value, " ") > 1 {
|
||||
return errTagValueSpace
|
||||
}
|
||||
|
||||
// If there is no comma, skip the rest of the checks.
|
||||
comma := strings.IndexRune(value, ',')
|
||||
if comma < 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// If the character before a comma is a space, this is suspicious.
|
||||
if comma > 0 && value[comma-1] == ' ' {
|
||||
return errTagValueSpace
|
||||
}
|
||||
value = value[comma+1:]
|
||||
case "json":
|
||||
// JSON allows using spaces in the name, so skip it.
|
||||
comma := strings.IndexRune(value, ',')
|
||||
if comma < 0 {
|
||||
continue
|
||||
}
|
||||
value = value[comma+1:]
|
||||
}
|
||||
|
||||
if strings.IndexByte(value, ' ') >= 0 {
|
||||
return errTagValueSpace
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
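For context (annotation, not part of the commit): a minimal sketch of the kind of code this vendored analyzer reports. The type and field names below are made up for illustration.

```go
package example

// Hypothetical type; both fields below would be reported by structtag.
type user struct {
	// key:"value" pairs must be separated by a space, so this tag is
	// "not compatible with reflect.StructTag.Get".
	Name string `json:"name"xml:"name"`

	// A json tag on an unexported field: "struct field age has json tag
	// but is not exported".
	age int `json:"age"`
}
```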
175 vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go generated vendored Normal file
@@ -0,0 +1,175 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package tests defines an Analyzer that checks for common mistaken
// usages of tests and examples.
package tests

import (
	"go/ast"
	"go/types"
	"strings"
	"unicode"
	"unicode/utf8"

	"golang.org/x/tools/go/analysis"
)

const Doc = `check for common mistaken usages of tests and examples

The tests checker walks Test, Benchmark and Example functions checking
malformed names, wrong signatures and examples documenting non-existent
identifiers.`

var Analyzer = &analysis.Analyzer{
	Name: "tests",
	Doc:  Doc,
	Run:  run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	for _, f := range pass.Files {
		if !strings.HasSuffix(pass.Fset.File(f.Pos()).Name(), "_test.go") {
			continue
		}
		for _, decl := range f.Decls {
			fn, ok := decl.(*ast.FuncDecl)
			if !ok || fn.Recv != nil {
				// Ignore non-functions or functions with receivers.
				continue
			}

			switch {
			case strings.HasPrefix(fn.Name.Name, "Example"):
				checkExample(pass, fn)
			case strings.HasPrefix(fn.Name.Name, "Test"):
				checkTest(pass, fn, "Test")
			case strings.HasPrefix(fn.Name.Name, "Benchmark"):
				checkTest(pass, fn, "Benchmark")
			}
		}
	}
	return nil, nil
}

func isExampleSuffix(s string) bool {
	r, size := utf8.DecodeRuneInString(s)
	return size > 0 && unicode.IsLower(r)
}

func isTestSuffix(name string) bool {
	if len(name) == 0 {
		// "Test" is ok.
		return true
	}
	r, _ := utf8.DecodeRuneInString(name)
	return !unicode.IsLower(r)
}

func isTestParam(typ ast.Expr, wantType string) bool {
	ptr, ok := typ.(*ast.StarExpr)
	if !ok {
		// Not a pointer.
		return false
	}
	// No easy way of making sure it's a *testing.T or *testing.B:
	// ensure the name of the type matches.
	if name, ok := ptr.X.(*ast.Ident); ok {
		return name.Name == wantType
	}
	if sel, ok := ptr.X.(*ast.SelectorExpr); ok {
		return sel.Sel.Name == wantType
	}
	return false
}

func lookup(pkg *types.Package, name string) types.Object {
	if o := pkg.Scope().Lookup(name); o != nil {
		return o
	}

	// If this package is ".../foo_test" and it imports a package
	// ".../foo", try looking in the latter package.
	// This heuristic should work even on build systems that do not
	// record any special link between the packages.
	if basePath := strings.TrimSuffix(pkg.Path(), "_test"); basePath != pkg.Path() {
		for _, imp := range pkg.Imports() {
			if imp.Path() == basePath {
				return imp.Scope().Lookup(name)
			}
		}
	}
	return nil
}

func checkExample(pass *analysis.Pass, fn *ast.FuncDecl) {
	fnName := fn.Name.Name
	if params := fn.Type.Params; len(params.List) != 0 {
		pass.Reportf(fn.Pos(), "%s should be niladic", fnName)
	}
	if results := fn.Type.Results; results != nil && len(results.List) != 0 {
		pass.Reportf(fn.Pos(), "%s should return nothing", fnName)
	}

	if fnName == "Example" {
		// Nothing more to do.
		return
	}

	var (
		exName = strings.TrimPrefix(fnName, "Example")
		elems  = strings.SplitN(exName, "_", 3)
		ident  = elems[0]
		obj    = lookup(pass.Pkg, ident)
	)
	if ident != "" && obj == nil {
		// Check ExampleFoo and ExampleBadFoo.
		pass.Reportf(fn.Pos(), "%s refers to unknown identifier: %s", fnName, ident)
		// Abort since obj is absent and no subsequent checks can be performed.
		return
	}
	if len(elems) < 2 {
		// Nothing more to do.
		return
	}

	if ident == "" {
		// Check Example_suffix and Example_BadSuffix.
		if residual := strings.TrimPrefix(exName, "_"); !isExampleSuffix(residual) {
			pass.Reportf(fn.Pos(), "%s has malformed example suffix: %s", fnName, residual)
		}
		return
	}

	mmbr := elems[1]
	if !isExampleSuffix(mmbr) {
		// Check ExampleFoo_Method and ExampleFoo_BadMethod.
		if obj, _, _ := types.LookupFieldOrMethod(obj.Type(), true, obj.Pkg(), mmbr); obj == nil {
			pass.Reportf(fn.Pos(), "%s refers to unknown field or method: %s.%s", fnName, ident, mmbr)
		}
	}
	if len(elems) == 3 && !isExampleSuffix(elems[2]) {
		// Check ExampleFoo_Method_suffix and ExampleFoo_Method_Badsuffix.
		pass.Reportf(fn.Pos(), "%s has malformed example suffix: %s", fnName, elems[2])
	}
}

func checkTest(pass *analysis.Pass, fn *ast.FuncDecl, prefix string) {
	// Want functions with 0 results and 1 parameter.
	if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 ||
		fn.Type.Params == nil ||
		len(fn.Type.Params.List) != 1 ||
		len(fn.Type.Params.List[0].Names) > 1 {
		return
	}

	// The param must look like a *testing.T or *testing.B.
	if !isTestParam(fn.Type.Params.List[0].Type, prefix[:1]) {
		return
	}

	if !isTestSuffix(fn.Name.Name[len(prefix):]) {
		pass.Reportf(fn.Pos(), "%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix)
	}
}
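For context (annotation, not part of the commit): two illustrative test-file declarations the tests analyzer flags; the names are hypothetical.

```go
package example_test

import "testing"

// Reported: "Testfoo has malformed name: first letter after 'Test' must not be lowercase".
func Testfoo(t *testing.T) {}

// Reported: "Example_BadSuffix has malformed example suffix: BadSuffix"
// (a suffix after '_' must start with a lower-case letter).
func Example_BadSuffix() {}
```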
92 vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go generated vendored Normal file
@@ -0,0 +1,92 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// The unmarshal package defines an Analyzer that checks for passing
// non-pointer or non-interface types to unmarshal and decode functions.
package unmarshal

import (
	"go/ast"
	"go/types"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/ast/inspector"
	"golang.org/x/tools/go/types/typeutil"
)

const doc = `report passing non-pointer or non-interface values to unmarshal

The unmarshal analysis reports calls to functions such as json.Unmarshal
in which the argument type is not a pointer or an interface.`

var Analyzer = &analysis.Analyzer{
	Name:     "unmarshal",
	Doc:      doc,
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	nodeFilter := []ast.Node{
		(*ast.CallExpr)(nil),
	}
	inspect.Preorder(nodeFilter, func(n ast.Node) {
		call := n.(*ast.CallExpr)
		fn := typeutil.StaticCallee(pass.TypesInfo, call)
		if fn == nil {
			return // not a static call
		}

		// Classify the callee (without allocating memory).
		argidx := -1
		recv := fn.Type().(*types.Signature).Recv()
		if fn.Name() == "Unmarshal" && recv == nil {
			// "encoding/json".Unmarshal
			// "encoding/xml".Unmarshal
			switch fn.Pkg().Path() {
			case "encoding/json", "encoding/xml":
				argidx = 1 // func([]byte, interface{})
			}
		} else if fn.Name() == "Decode" && recv != nil {
			// (*"encoding/json".Decoder).Decode
			// (* "encoding/gob".Decoder).Decode
			// (* "encoding/xml".Decoder).Decode
			t := recv.Type()
			if ptr, ok := t.(*types.Pointer); ok {
				t = ptr.Elem()
			}
			tname := t.(*types.Named).Obj()
			if tname.Name() == "Decoder" {
				switch tname.Pkg().Path() {
				case "encoding/json", "encoding/xml", "encoding/gob":
					argidx = 0 // func(interface{})
				}
			}
		}
		if argidx < 0 {
			return // not a function we are interested in
		}

		if len(call.Args) < argidx+1 {
			return // not enough arguments, e.g. called with return values of another function
		}

		t := pass.TypesInfo.Types[call.Args[argidx]].Type
		switch t.Underlying().(type) {
		case *types.Pointer, *types.Interface:
			return
		}

		switch argidx {
		case 0:
			pass.Reportf(call.Lparen, "call of %s passes non-pointer", fn.Name())
		case 1:
			pass.Reportf(call.Lparen, "call of %s passes non-pointer as second argument", fn.Name())
		}
	})
	return nil, nil
}
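For context (annotation, not part of the commit): a small, self-contained program that triggers this check.

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var v map[string]int
	// v is passed by value instead of &v; the analyzer reports
	// "call of Unmarshal passes non-pointer as second argument".
	err := json.Unmarshal([]byte(`{"a": 1}`), v)
	fmt.Println(err)
}
```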
@@ -2,24 +2,68 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Check for syntactically unreachable code.
// Package unreachable defines an Analyzer that checks for unreachable code.
package unreachable

package govet
// TODO(adonovan): use the new cfg package, which is more precise.

import (
	"go/ast"
	"go/token"
	"log"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/ast/inspector"
)

func init() {
	register("unreachable",
		"check for unreachable code",
		checkUnreachable,
		funcDecl, funcLit)
const Doc = `check for unreachable code

The unreachable analyzer finds statements that execution can never reach
because they are preceded by an return statement, a call to panic, an
infinite loop, or similar constructs.`

var Analyzer = &analysis.Analyzer{
	Name:             "unreachable",
	Doc:              Doc,
	Requires:         []*analysis.Analyzer{inspect.Analyzer},
	RunDespiteErrors: true,
	Run:              run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	nodeFilter := []ast.Node{
		(*ast.FuncDecl)(nil),
		(*ast.FuncLit)(nil),
	}
	inspect.Preorder(nodeFilter, func(n ast.Node) {
		var body *ast.BlockStmt
		switch n := n.(type) {
		case *ast.FuncDecl:
			body = n.Body
		case *ast.FuncLit:
			body = n.Body
		}
		if body == nil {
			return
		}
		d := &deadState{
			pass:     pass,
			hasBreak: make(map[ast.Stmt]bool),
			hasGoto:  make(map[string]bool),
			labels:   make(map[string]ast.Stmt),
		}
		d.findLabels(body)
		d.reachable = true
		d.findDead(body)
	})
	return nil, nil
}

type deadState struct {
	f        *File
	pass     *analysis.Pass
	hasBreak map[ast.Stmt]bool
	hasGoto  map[string]bool
	labels   map[string]ast.Stmt

@@ -28,40 +72,12 @@ type deadState struct {
	reachable bool
}

// checkUnreachable checks a function body for dead code.
//
// TODO(adonovan): use the new cfg package, which is more precise.
func checkUnreachable(f *File, node ast.Node) {
	var body *ast.BlockStmt
	switch n := node.(type) {
	case *ast.FuncDecl:
		body = n.Body
	case *ast.FuncLit:
		body = n.Body
	}
	if body == nil {
		return
	}

	d := &deadState{
		f:        f,
		hasBreak: make(map[ast.Stmt]bool),
		hasGoto:  make(map[string]bool),
		labels:   make(map[string]ast.Stmt),
	}

	d.findLabels(body)

	d.reachable = true
	d.findDead(body)
}

// findLabels gathers information about the labels defined and used by stmt
// and about which statements break, whether a label is involved or not.
func (d *deadState) findLabels(stmt ast.Stmt) {
	switch x := stmt.(type) {
	default:
		d.f.Warnf(x.Pos(), "internal error in findLabels: unexpected statement %T", x)
		log.Fatalf("%s: internal error in findLabels: unexpected statement %T", d.pass.Fset.Position(x.Pos()), x)

	case *ast.AssignStmt,
		*ast.BadStmt,

@@ -173,14 +189,14 @@ func (d *deadState) findDead(stmt ast.Stmt) {
	case *ast.EmptyStmt:
		// do not warn about unreachable empty statements
	default:
		d.f.Bad(stmt.Pos(), "unreachable code")
		d.pass.Reportf(stmt.Pos(), "unreachable code")
		d.reachable = true // silence error about next statement
	}
}

	switch x := stmt.(type) {
	default:
		d.f.Warnf(x.Pos(), "internal error in findDead: unexpected statement %T", x)
		log.Fatalf("%s: internal error in findDead: unexpected statement %T", d.pass.Fset.Position(x.Pos()), x)

	case *ast.AssignStmt,
		*ast.BadStmt,
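For context (annotation, not part of the commit): a trivial function that the rewritten unreachable analyzer reports.

```go
package example

func abs(x int) int {
	if x < 0 {
		return -x
	}
	return x
	panic("not reached") // reported: "unreachable code"
}
```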
130 vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go generated vendored Normal file
@@ -0,0 +1,130 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package unsafeptr defines an Analyzer that checks for invalid
// conversions of uintptr to unsafe.Pointer.
package unsafeptr

import (
	"go/ast"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/ast/inspector"
)

const Doc = `check for invalid conversions of uintptr to unsafe.Pointer

The unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer
to convert integers to pointers. A conversion from uintptr to
unsafe.Pointer is invalid if it implies that there is a uintptr-typed
word in memory that holds a pointer value, because that word will be
invisible to stack copying and to the garbage collector.`

var Analyzer = &analysis.Analyzer{
	Name:     "unsafeptr",
	Doc:      Doc,
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	nodeFilter := []ast.Node{
		(*ast.CallExpr)(nil),
	}
	inspect.Preorder(nodeFilter, func(n ast.Node) {
		x := n.(*ast.CallExpr)
		if len(x.Args) != 1 {
			return
		}
		if hasBasicType(pass.TypesInfo, x.Fun, types.UnsafePointer) &&
			hasBasicType(pass.TypesInfo, x.Args[0], types.Uintptr) &&
			!isSafeUintptr(pass.TypesInfo, x.Args[0]) {
			pass.Reportf(x.Pos(), "possible misuse of unsafe.Pointer")
		}
	})
	return nil, nil
}

// isSafeUintptr reports whether x - already known to be a uintptr -
// is safe to convert to unsafe.Pointer. It is safe if x is itself derived
// directly from an unsafe.Pointer via conversion and pointer arithmetic
// or if x is the result of reflect.Value.Pointer or reflect.Value.UnsafeAddr
// or obtained from the Data field of a *reflect.SliceHeader or *reflect.StringHeader.
func isSafeUintptr(info *types.Info, x ast.Expr) bool {
	switch x := x.(type) {
	case *ast.ParenExpr:
		return isSafeUintptr(info, x.X)

	case *ast.SelectorExpr:
		if x.Sel.Name != "Data" {
			break
		}
		// reflect.SliceHeader and reflect.StringHeader are okay,
		// but only if they are pointing at a real slice or string.
		// It's not okay to do:
		//	var x SliceHeader
		//	x.Data = uintptr(unsafe.Pointer(...))
		//	... use x ...
		//	p := unsafe.Pointer(x.Data)
		// because in the middle the garbage collector doesn't
		// see x.Data as a pointer and so x.Data may be dangling
		// by the time we get to the conversion at the end.
		// For now approximate by saying that *Header is okay
		// but Header is not.
		pt, ok := info.Types[x.X].Type.(*types.Pointer)
		if ok {
			t, ok := pt.Elem().(*types.Named)
			if ok && t.Obj().Pkg().Path() == "reflect" {
				switch t.Obj().Name() {
				case "StringHeader", "SliceHeader":
					return true
				}
			}
		}

	case *ast.CallExpr:
		switch len(x.Args) {
		case 0:
			// maybe call to reflect.Value.Pointer or reflect.Value.UnsafeAddr.
			sel, ok := x.Fun.(*ast.SelectorExpr)
			if !ok {
				break
			}
			switch sel.Sel.Name {
			case "Pointer", "UnsafeAddr":
				t, ok := info.Types[sel.X].Type.(*types.Named)
				if ok && t.Obj().Pkg().Path() == "reflect" && t.Obj().Name() == "Value" {
					return true
				}
			}

		case 1:
			// maybe conversion of uintptr to unsafe.Pointer
			return hasBasicType(info, x.Fun, types.Uintptr) &&
				hasBasicType(info, x.Args[0], types.UnsafePointer)
		}

	case *ast.BinaryExpr:
		switch x.Op {
		case token.ADD, token.SUB, token.AND_NOT:
			return isSafeUintptr(info, x.X) && !isSafeUintptr(info, x.Y)
		}
	}
	return false
}

// hasBasicType reports whether x's type is a types.Basic with the given kind.
func hasBasicType(info *types.Info, x ast.Expr, kind types.BasicKind) bool {
	t := info.Types[x].Type
	if t != nil {
		t = t.Underlying()
	}
	b, ok := t.(*types.Basic)
	return ok && b.Kind() == kind
}
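For context (annotation, not part of the commit): a sketch of the pattern this analyzer flags versus the single-expression form it accepts; the `pair` type is made up.

```go
package example

import "unsafe"

type pair struct{ a, b int }

// Reported: the pointer is parked in a uintptr variable, invisible to the
// garbage collector, before being converted back.
func bad(p *pair) *int {
	addr := uintptr(unsafe.Pointer(p)) + unsafe.Offsetof(p.b)
	return (*int)(unsafe.Pointer(addr)) // possible misuse of unsafe.Pointer
}

// Not reported: conversion and arithmetic stay within one expression.
func good(p *pair) *int {
	return (*int)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + unsafe.Offsetof(p.b)))
}
```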
131 vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go generated vendored Normal file
@@ -0,0 +1,131 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package unusedresult defines an analyzer that checks for unused
// results of calls to certain pure functions.
package unusedresult

import (
	"go/ast"
	"go/token"
	"go/types"
	"sort"
	"strings"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
	"golang.org/x/tools/go/ast/inspector"
)

// TODO(adonovan): make this analysis modular: export a mustUseResult
// fact for each function that tail-calls one of the functions that we
// check, and check those functions too.

const Doc = `check for unused results of calls to some functions

Some functions like fmt.Errorf return a result and have no side effects,
so it is always a mistake to discard the result. This analyzer reports
calls to certain functions in which the result of the call is ignored.

The set of functions may be controlled using flags.`

var Analyzer = &analysis.Analyzer{
	Name:     "unusedresult",
	Doc:      Doc,
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run:      run,
}

// flags
var funcs, stringMethods stringSetFlag

func init() {
	// TODO(adonovan): provide a comment syntax to allow users to
	// add their functions to this set using facts.
	funcs.Set("errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint,sort.Reverse")
	Analyzer.Flags.Var(&funcs, "funcs",
		"comma-separated list of functions whose results must be used")

	stringMethods.Set("Error,String")
	Analyzer.Flags.Var(&stringMethods, "stringmethods",
		"comma-separated list of names of methods of type func() string whose results must be used")
}

func run(pass *analysis.Pass) (interface{}, error) {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	nodeFilter := []ast.Node{
		(*ast.ExprStmt)(nil),
	}
	inspect.Preorder(nodeFilter, func(n ast.Node) {
		call, ok := analysisutil.Unparen(n.(*ast.ExprStmt).X).(*ast.CallExpr)
		if !ok {
			return // not a call statement
		}
		fun := analysisutil.Unparen(call.Fun)

		if pass.TypesInfo.Types[fun].IsType() {
			return // a conversion, not a call
		}

		selector, ok := fun.(*ast.SelectorExpr)
		if !ok {
			return // neither a method call nor a qualified ident
		}

		sel, ok := pass.TypesInfo.Selections[selector]
		if ok && sel.Kind() == types.MethodVal {
			// method (e.g. foo.String())
			obj := sel.Obj().(*types.Func)
			sig := sel.Type().(*types.Signature)
			if types.Identical(sig, sigNoArgsStringResult) {
				if stringMethods[obj.Name()] {
					pass.Reportf(call.Lparen, "result of (%s).%s call not used",
						sig.Recv().Type(), obj.Name())
				}
			}
		} else if !ok {
			// package-qualified function (e.g. fmt.Errorf)
			obj := pass.TypesInfo.Uses[selector.Sel]
			if obj, ok := obj.(*types.Func); ok {
				qname := obj.Pkg().Path() + "." + obj.Name()
				if funcs[qname] {
					pass.Reportf(call.Lparen, "result of %v call not used", qname)
				}
			}
		}
	})
	return nil, nil
}

// func() string
var sigNoArgsStringResult = types.NewSignature(nil, nil,
	types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String])),
	false)

type stringSetFlag map[string]bool

func (ss *stringSetFlag) String() string {
	var items []string
	for item := range *ss {
		items = append(items, item)
	}
	sort.Strings(items)
	return strings.Join(items, ",")
}

func (ss *stringSetFlag) Set(s string) error {
	m := make(map[string]bool) // clobber previous value
	if s != "" {
		for _, name := range strings.Split(s, ",") {
			if name == "" {
				continue // TODO: report error? proceed?
			}
			m[name] = true
		}
	}
	*ss = m
	return nil
}
104 vendor/golang.org/x/tools/go/analysis/validate.go generated vendored Normal file
@@ -0,0 +1,104 @@
package analysis

import (
	"fmt"
	"reflect"
	"unicode"
)

// Validate reports an error if any of the analyzers are misconfigured.
// Checks include:
// that the name is a valid identifier;
// that analyzer names are unique;
// that the Requires graph is acylic;
// that analyzer fact types are unique;
// that each fact type is a pointer.
func Validate(analyzers []*Analyzer) error {
	names := make(map[string]bool)

	// Map each fact type to its sole generating analyzer.
	factTypes := make(map[reflect.Type]*Analyzer)

	// Traverse the Requires graph, depth first.
	const (
		white = iota
		grey
		black
		finished
	)
	color := make(map[*Analyzer]uint8)
	var visit func(a *Analyzer) error
	visit = func(a *Analyzer) error {
		if a == nil {
			return fmt.Errorf("nil *Analyzer")
		}
		if color[a] == white {
			color[a] = grey

			// names
			if !validIdent(a.Name) {
				return fmt.Errorf("invalid analyzer name %q", a)
			}
			if names[a.Name] {
				return fmt.Errorf("duplicate analyzer name %q", a)
			}
			names[a.Name] = true

			if a.Doc == "" {
				return fmt.Errorf("analyzer %q is undocumented", a)
			}

			// fact types
			for _, f := range a.FactTypes {
				if f == nil {
					return fmt.Errorf("analyzer %s has nil FactType", a)
				}
				t := reflect.TypeOf(f)
				if prev := factTypes[t]; prev != nil {
					return fmt.Errorf("fact type %s registered by two analyzers: %v, %v",
						t, a, prev)
				}
				if t.Kind() != reflect.Ptr {
					return fmt.Errorf("%s: fact type %s is not a pointer", a, t)
				}
				factTypes[t] = a
			}

			// recursion
			for i, req := range a.Requires {
				if err := visit(req); err != nil {
					return fmt.Errorf("%s.Requires[%d]: %v", a.Name, i, err)
				}
			}
			color[a] = black
		}

		return nil
	}
	for _, a := range analyzers {
		if err := visit(a); err != nil {
			return err
		}
	}

	// Reject duplicates among analyzers.
	// Precondition: color[a] == black.
	// Postcondition: color[a] == finished.
	for _, a := range analyzers {
		if color[a] == finished {
			return fmt.Errorf("duplicate analyzer: %s", a.Name)
		}
		color[a] = finished
	}

	return nil
}

func validIdent(name string) bool {
	for i, r := range name {
		if !(r == '_' || unicode.IsLetter(r) || i > 0 && unicode.IsDigit(r)) {
			return false
		}
	}
	return name != ""
}
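For context (annotation, not part of the commit): a minimal sketch of how a driver validates its analyzer set before running it. This is illustrative only and is not how golangci-lint itself wires the passes up.

```go
package main

import (
	"log"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/structtag"
	"golang.org/x/tools/go/analysis/passes/tests"
)

func main() {
	analyzers := []*analysis.Analyzer{structtag.Analyzer, tests.Analyzer}
	// Duplicate or invalid names, cyclic Requires graphs and non-pointer
	// fact types are rejected here rather than during analysis.
	if err := analysis.Validate(analyzers); err != nil {
		log.Fatal(err)
	}
}
```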
182 vendor/golang.org/x/tools/go/ast/inspector/inspector.go generated vendored Normal file
@@ -0,0 +1,182 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package inspector provides helper functions for traversal over the
// syntax trees of a package, including node filtering by type, and
// materialization of the traversal stack.
//
// During construction, the inspector does a complete traversal and
// builds a list of push/pop events and their node type. Subsequent
// method calls that request a traversal scan this list, rather than walk
// the AST, and perform type filtering using efficient bit sets.
//
// Experiments suggest the inspector's traversals are about 2.5x faster
// than ast.Inspect, but it may take around 5 traversals for this
// benefit to amortize the inspector's construction cost.
// If efficiency is the primary concern, do not use Inspector for
// one-off traversals.
package inspector

// There are four orthogonal features in a traversal:
//  1 type filtering
//  2 pruning
//  3 postorder calls to f
//  4 stack
// Rather than offer all of them in the API,
// only a few combinations are exposed:
// - Preorder is the fastest and has fewest features,
//   but is the most commonly needed traversal.
// - Nodes and WithStack both provide pruning and postorder calls,
//   even though few clients need it, because supporting two versions
//   is not justified.
// More combinations could be supported by expressing them as
// wrappers around a more generic traversal, but this was measured
// and found to degrade performance significantly (30%).

import (
	"go/ast"
)

// An Inspector provides methods for inspecting
// (traversing) the syntax trees of a package.
type Inspector struct {
	events []event
}

// New returns an Inspector for the specified syntax trees.
func New(files []*ast.File) *Inspector {
	return &Inspector{traverse(files)}
}

// An event represents a push or a pop
// of an ast.Node during a traversal.
type event struct {
	node  ast.Node
	typ   uint64 // typeOf(node)
	index int    // 1 + index of corresponding pop event, or 0 if this is a pop
}

// Preorder visits all the nodes of the files supplied to New in
// depth-first order. It calls f(n) for each node n before it visits
// n's children.
//
// The types argument, if non-empty, enables type-based filtering of
// events. The function f if is called only for nodes whose type
// matches an element of the types slice.
func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) {
	// Because it avoids postorder calls to f, and the pruning
	// check, Preorder is almost twice as fast as Nodes. The two
	// features seem to contribute similar slowdowns (~1.4x each).

	mask := maskOf(types)
	for i := 0; i < len(in.events); {
		ev := in.events[i]
		if ev.typ&mask != 0 {
			if ev.index > 0 {
				f(ev.node)
			}
		}
		i++
	}
}

// Nodes visits the nodes of the files supplied to New in depth-first
// order. It calls f(n, true) for each node n before it visits n's
// children. If f returns true, Nodes invokes f recursively for each
// of the non-nil children of the node, followed by a call of
// f(n, false).
//
// The types argument, if non-empty, enables type-based filtering of
// events. The function f if is called only for nodes whose type
// matches an element of the types slice.
func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (prune bool)) {
	mask := maskOf(types)
	for i := 0; i < len(in.events); {
		ev := in.events[i]
		if ev.typ&mask != 0 {
			if ev.index > 0 {
				// push
				if !f(ev.node, true) {
					i = ev.index // jump to corresponding pop + 1
					continue
				}
			} else {
				// pop
				f(ev.node, false)
			}
		}
		i++
	}
}

// WithStack visits nodes in a similar manner to Nodes, but it
// supplies each call to f an additional argument, the current
// traversal stack. The stack's first element is the outermost node,
// an *ast.File; its last is the innermost, n.
func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (prune bool)) {
	mask := maskOf(types)
	var stack []ast.Node
	for i := 0; i < len(in.events); {
		ev := in.events[i]
		if ev.index > 0 {
			// push
			stack = append(stack, ev.node)
			if ev.typ&mask != 0 {
				if !f(ev.node, true, stack) {
					i = ev.index
					stack = stack[:len(stack)-1]
					continue
				}
			}
		} else {
			// pop
			if ev.typ&mask != 0 {
				f(ev.node, false, stack)
			}
			stack = stack[:len(stack)-1]
		}
		i++
	}
}

// traverse builds the table of events representing a traversal.
func traverse(files []*ast.File) []event {
	// Preallocate approximate number of events
	// based on source file extent.
	// This makes traverse faster by 4x (!).
	var extent int
	for _, f := range files {
		extent += int(f.End() - f.Pos())
	}
	// This estimate is based on the net/http package.
	events := make([]event, 0, extent*33/100)

	var stack []event
	for _, f := range files {
		ast.Inspect(f, func(n ast.Node) bool {
			if n != nil {
				// push
				ev := event{
					node:  n,
					typ:   typeOf(n),
					index: len(events), // push event temporarily holds own index
				}
				stack = append(stack, ev)
				events = append(events, ev)
			} else {
				// pop
				ev := stack[len(stack)-1]
				stack = stack[:len(stack)-1]

				events[ev.index].index = len(events) + 1 // make push refer to pop

				ev.index = 0 // turn ev into a pop event
				events = append(events, ev)
			}
			return true
		})
	}

	return events
}
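For context (annotation, not part of the commit): the event-based traversal described in the package comment is driven through Preorder/Nodes/WithStack, as in this small program.

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/inspector"
)

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "x.go", "package p\nfunc a() {}\nfunc b() {}\n", 0)
	if err != nil {
		panic(err)
	}
	in := inspector.New([]*ast.File{f})
	// Preorder with a type filter: the callback only sees *ast.FuncDecl nodes.
	in.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(n ast.Node) {
		fmt.Println(n.(*ast.FuncDecl).Name.Name) // prints "a", then "b"
	})
}
```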
216 vendor/golang.org/x/tools/go/ast/inspector/typeof.go generated vendored Normal file
@@ -0,0 +1,216 @@
package inspector

// This file defines func typeOf(ast.Node) uint64.
//
// The initial map-based implementation was too slow;
// see https://go-review.googlesource.com/c/tools/+/135655/1/go/ast/inspector/inspector.go#196

import "go/ast"

const (
	nArrayType = iota
	nAssignStmt
	nBadDecl
	nBadExpr
	nBadStmt
	nBasicLit
	nBinaryExpr
	nBlockStmt
	nBranchStmt
	nCallExpr
	nCaseClause
	nChanType
	nCommClause
	nComment
	nCommentGroup
	nCompositeLit
	nDeclStmt
	nDeferStmt
	nEllipsis
	nEmptyStmt
	nExprStmt
	nField
	nFieldList
	nFile
	nForStmt
	nFuncDecl
	nFuncLit
	nFuncType
	nGenDecl
	nGoStmt
	nIdent
	nIfStmt
	nImportSpec
	nIncDecStmt
	nIndexExpr
	nInterfaceType
	nKeyValueExpr
	nLabeledStmt
	nMapType
	nPackage
	nParenExpr
	nRangeStmt
	nReturnStmt
	nSelectStmt
	nSelectorExpr
	nSendStmt
	nSliceExpr
	nStarExpr
	nStructType
	nSwitchStmt
	nTypeAssertExpr
	nTypeSpec
	nTypeSwitchStmt
	nUnaryExpr
	nValueSpec
)

// typeOf returns a distinct single-bit value that represents the type of n.
//
// Various implementations were benchmarked with BenchmarkNewInspector:
//                                                      GOGC=off
// - type switch                                4.9-5.5ms    2.1ms
// - binary search over a sorted list of types  5.5-5.9ms    2.5ms
// - linear scan, frequency-ordered list        5.9-6.1ms    2.7ms
// - linear scan, unordered list                6.4ms        2.7ms
// - hash table                                 6.5ms        3.1ms
// A perfect hash seemed like overkill.
//
// The compiler's switch statement is the clear winner
// as it produces a binary tree in code,
// with constant conditions and good branch prediction.
// (Sadly it is the most verbose in source code.)
// Binary search suffered from poor branch prediction.
//
func typeOf(n ast.Node) uint64 {
	// Fast path: nearly half of all nodes are identifiers.
	if _, ok := n.(*ast.Ident); ok {
		return 1 << nIdent
	}

	// These cases include all nodes encountered by ast.Inspect.
	switch n.(type) {
	case *ast.ArrayType:
		return 1 << nArrayType
	case *ast.AssignStmt:
		return 1 << nAssignStmt
	case *ast.BadDecl:
		return 1 << nBadDecl
	case *ast.BadExpr:
		return 1 << nBadExpr
	case *ast.BadStmt:
		return 1 << nBadStmt
	case *ast.BasicLit:
		return 1 << nBasicLit
	case *ast.BinaryExpr:
		return 1 << nBinaryExpr
	case *ast.BlockStmt:
		return 1 << nBlockStmt
	case *ast.BranchStmt:
		return 1 << nBranchStmt
	case *ast.CallExpr:
		return 1 << nCallExpr
	case *ast.CaseClause:
		return 1 << nCaseClause
	case *ast.ChanType:
		return 1 << nChanType
	case *ast.CommClause:
		return 1 << nCommClause
	case *ast.Comment:
		return 1 << nComment
	case *ast.CommentGroup:
		return 1 << nCommentGroup
	case *ast.CompositeLit:
		return 1 << nCompositeLit
	case *ast.DeclStmt:
		return 1 << nDeclStmt
	case *ast.DeferStmt:
		return 1 << nDeferStmt
	case *ast.Ellipsis:
		return 1 << nEllipsis
	case *ast.EmptyStmt:
		return 1 << nEmptyStmt
	case *ast.ExprStmt:
		return 1 << nExprStmt
	case *ast.Field:
		return 1 << nField
	case *ast.FieldList:
		return 1 << nFieldList
	case *ast.File:
		return 1 << nFile
	case *ast.ForStmt:
		return 1 << nForStmt
	case *ast.FuncDecl:
		return 1 << nFuncDecl
	case *ast.FuncLit:
		return 1 << nFuncLit
	case *ast.FuncType:
		return 1 << nFuncType
	case *ast.GenDecl:
		return 1 << nGenDecl
	case *ast.GoStmt:
		return 1 << nGoStmt
	case *ast.Ident:
		return 1 << nIdent
	case *ast.IfStmt:
		return 1 << nIfStmt
	case *ast.ImportSpec:
		return 1 << nImportSpec
	case *ast.IncDecStmt:
		return 1 << nIncDecStmt
	case *ast.IndexExpr:
		return 1 << nIndexExpr
	case *ast.InterfaceType:
		return 1 << nInterfaceType
	case *ast.KeyValueExpr:
		return 1 << nKeyValueExpr
	case *ast.LabeledStmt:
		return 1 << nLabeledStmt
	case *ast.MapType:
		return 1 << nMapType
	case *ast.Package:
		return 1 << nPackage
	case *ast.ParenExpr:
		return 1 << nParenExpr
	case *ast.RangeStmt:
		return 1 << nRangeStmt
	case *ast.ReturnStmt:
		return 1 << nReturnStmt
	case *ast.SelectStmt:
		return 1 << nSelectStmt
	case *ast.SelectorExpr:
		return 1 << nSelectorExpr
	case *ast.SendStmt:
		return 1 << nSendStmt
	case *ast.SliceExpr:
		return 1 << nSliceExpr
	case *ast.StarExpr:
		return 1 << nStarExpr
	case *ast.StructType:
		return 1 << nStructType
	case *ast.SwitchStmt:
		return 1 << nSwitchStmt
	case *ast.TypeAssertExpr:
		return 1 << nTypeAssertExpr
	case *ast.TypeSpec:
		return 1 << nTypeSpec
	case *ast.TypeSwitchStmt:
		return 1 << nTypeSwitchStmt
	case *ast.UnaryExpr:
		return 1 << nUnaryExpr
	case *ast.ValueSpec:
		return 1 << nValueSpec
	}
	return 0
}

func maskOf(nodes []ast.Node) uint64 {
	if nodes == nil {
		return 1<<64 - 1 // match all node types
	}
	var mask uint64
	for _, n := range nodes {
		mask |= typeOf(n)
	}
	return mask
}
@@ -42,7 +42,7 @@ start:
		b.add(s)
		if call, ok := s.X.(*ast.CallExpr); ok && !b.mayReturn(call) {
			// Calls to panic, os.Exit, etc, never return.
			b.current = b.newUnreachableBlock("unreachable.call")
			b.current = b.newBlock("unreachable.call")
		}

	case *ast.DeclStmt:

@@ -65,48 +65,10 @@ start:
	case *ast.ReturnStmt:
		b.add(s)
		b.current = b.newUnreachableBlock("unreachable.return")
		b.current = b.newBlock("unreachable.return")

	case *ast.BranchStmt:
		var block *Block
		switch s.Tok {
		case token.BREAK:
			if s.Label != nil {
				if lb := b.labeledBlock(s.Label); lb != nil {
					block = lb._break
				}
			} else {
				for t := b.targets; t != nil && block == nil; t = t.tail {
					block = t._break
				}
			}

		case token.CONTINUE:
			if s.Label != nil {
				if lb := b.labeledBlock(s.Label); lb != nil {
					block = lb._continue
				}
			} else {
				for t := b.targets; t != nil && block == nil; t = t.tail {
					block = t._continue
				}
			}

		case token.FALLTHROUGH:
			for t := b.targets; t != nil; t = t.tail {
				block = t._fallthrough
			}

		case token.GOTO:
			if s.Label != nil {
				block = b.labeledBlock(s.Label)._goto
			}
		}
		if block == nil {
			block = b.newBlock("undefined.branch")
		}
		b.jump(block)
		b.current = b.newUnreachableBlock("unreachable.branch")
		b.branchStmt(s)

	case *ast.BlockStmt:
		b.stmtList(s.List)

@@ -161,6 +123,48 @@ func (b *builder) stmtList(list []ast.Stmt) {
	}
}

func (b *builder) branchStmt(s *ast.BranchStmt) {
	var block *Block
	switch s.Tok {
	case token.BREAK:
		if s.Label != nil {
			if lb := b.labeledBlock(s.Label); lb != nil {
				block = lb._break
			}
		} else {
			for t := b.targets; t != nil && block == nil; t = t.tail {
				block = t._break
			}
		}

	case token.CONTINUE:
		if s.Label != nil {
			if lb := b.labeledBlock(s.Label); lb != nil {
				block = lb._continue
			}
		} else {
			for t := b.targets; t != nil && block == nil; t = t.tail {
				block = t._continue
			}
		}

	case token.FALLTHROUGH:
		for t := b.targets; t != nil; t = t.tail {
			block = t._fallthrough
		}

	case token.GOTO:
		if s.Label != nil {
			block = b.labeledBlock(s.Label)._goto
		}
	}
	if block == nil {
		block = b.newBlock("undefined.branch")
	}
	b.jump(block)
	b.current = b.newBlock("unreachable.branch")
}

func (b *builder) switchStmt(s *ast.SwitchStmt, label *lblock) {
	if s.Init != nil {
		b.stmt(s.Init)

@@ -479,7 +483,7 @@ func (b *builder) labeledBlock(label *ast.Ident) *lblock {
func (b *builder) newBlock(comment string) *Block {
	g := b.cfg
	block := &Block{
		index:   int32(len(g.Blocks)),
		Index:   int32(len(g.Blocks)),
		comment: comment,
	}
	block.Succs = block.succs2[:0]

@@ -487,12 +491,6 @@ func (b *builder) newBlock(comment string) *Block {
	return block
}

func (b *builder) newUnreachableBlock(comment string) *Block {
	block := b.newBlock(comment)
	block.unreachable = true
	return block
}

func (b *builder) add(n ast.Node) {
	b.current.Nodes = append(b.current.Nodes, n)
}

@@ -39,11 +39,6 @@
//
package cfg

// Although the vet tool has type information, it is often extremely
// fragmentary, so for simplicity this package does not depend on
// go/types. Consequently control-flow conditions are ignored even
// when constant, and "mayReturn" information must be provided by the
// client.
import (
	"bytes"
	"fmt"

@@ -68,11 +63,11 @@ type CFG struct {
type Block struct {
	Nodes []ast.Node // statements, expressions, and ValueSpecs
	Succs []*Block   // successor nodes in the graph
	Index int32      // index within CFG.Blocks
	Live  bool       // block is reachable from entry

	comment     string    // for debugging
	index       int32     // index within CFG.Blocks
	unreachable bool      // is block of stmts following return/panic/for{}
	succs2      [2]*Block // underlying array for Succs
	comment     string    // for debugging
	succs2      [2]*Block // underlying array for Succs
}

// New returns a new control-flow graph for the specified function body,

@@ -91,9 +86,22 @@ func New(body *ast.BlockStmt, mayReturn func(*ast.CallExpr) bool) *CFG {
	b.current = b.newBlock("entry")
	b.stmt(body)

	// Compute liveness (reachability from entry point), breadth-first.
	q := make([]*Block, 0, len(b.cfg.Blocks))
	q = append(q, b.cfg.Blocks[0]) // entry point
	for len(q) > 0 {
		b := q[len(q)-1]
		q = q[:len(q)-1]

		if !b.Live {
			b.Live = true
			q = append(q, b.Succs...)
		}
	}

	// Does control fall off the end of the function's body?
	// Make implicit return explicit.
	if b.current != nil && !b.current.unreachable {
	if b.current != nil && b.current.Live {
		b.add(&ast.ReturnStmt{
			Return: body.End() - 1,
		})

@@ -103,7 +111,7 @@ func New(body *ast.BlockStmt, mayReturn func(*ast.CallExpr) bool) *CFG {
}

func (b *Block) String() string {
	return fmt.Sprintf("block %d (%s)", b.index, b.comment)
	return fmt.Sprintf("block %d (%s)", b.Index, b.comment)
}

// Return returns the return statement at the end of this block if present, nil otherwise.

@@ -118,14 +126,14 @@ func (b *Block) Return() (ret *ast.ReturnStmt) {
func (g *CFG) Format(fset *token.FileSet) string {
	var buf bytes.Buffer
	for _, b := range g.Blocks {
		fmt.Fprintf(&buf, ".%d: # %s\n", b.index, b.comment)
		fmt.Fprintf(&buf, ".%d: # %s\n", b.Index, b.comment)
		for _, n := range b.Nodes {
			fmt.Fprintf(&buf, "\t%s\n", formatNode(fset, n))
		}
		if len(b.Succs) > 0 {
			fmt.Fprintf(&buf, "\tsuccs:")
			for _, succ := range b.Succs {
				fmt.Fprintf(&buf, " %d", succ.index)
				fmt.Fprintf(&buf, " %d", succ.Index)
			}
			buf.WriteByte('\n')
		}
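For context (annotation, not part of the commit): the hunks above expose Block.Index and Block.Live instead of the unexported index/unreachable fields. A small sketch of consuming that API via the x/tools cfg package, assuming a trivial mayReturn predicate.

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/cfg"
)

func main() {
	fset := token.NewFileSet()
	src := "package p\nfunc f(x int) int {\n\tif x > 0 {\n\t\treturn x\n\t}\n\treturn -x\n}\n"
	f, err := parser.ParseFile(fset, "x.go", src, 0)
	if err != nil {
		panic(err)
	}
	fd := f.Decls[0].(*ast.FuncDecl)
	// mayReturn: assume every call may return; good enough for a sketch.
	g := cfg.New(fd.Body, func(*ast.CallExpr) bool { return true })
	for _, b := range g.Blocks {
		fmt.Printf("block %d live=%v nodes=%d\n", b.Index, b.Live, len(b.Nodes))
	}
}
```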
8 vendor/golang.org/x/tools/imports/mod.go generated vendored
@@ -24,6 +24,7 @@ import (
type moduleResolver struct {
	env *fixEnv

	initialized   bool
	main          *moduleJSON
	modsByModPath []*moduleJSON // All modules, ordered by # of path components in module Path...
	modsByDir     []*moduleJSON // ...or Dir.

@@ -48,7 +49,7 @@ type moduleErrorJSON struct {
}

func (r *moduleResolver) init() error {
	if r.main != nil {
	if r.initialized {
		return nil
	}
	stdout, err := r.env.invokeGo("list", "-m", "-json", "...")

@@ -87,6 +88,7 @@ func (r *moduleResolver) init() error {
		return count(j) < count(i) // descending order
	})

	r.initialized = true
	return nil
}

@@ -202,7 +204,9 @@ func (r *moduleResolver) scan(_ references) ([]*pkg, error) {
	// Walk GOROOT, GOPATH/pkg/mod, and the main module.
	roots := []gopathwalk.Root{
		{filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
		{r.main.Dir, gopathwalk.RootCurrentModule},
	}
	if r.main != nil {
		roots = append(roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule})
	}
	for _, p := range filepath.SplitList(r.env.GOPATH) {
		roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache})
44 vendor/modules.txt vendored
@@ -92,10 +92,6 @@ github.com/golangci/gofmt/goimports
# github.com/golangci/gosec v0.0.0-20180901114220-66fb7fc33547
github.com/golangci/gosec
github.com/golangci/gosec/rules
# github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190
github.com/golangci/govet
github.com/golangci/govet/lib/cfg
github.com/golangci/govet/lib/whitelist
# github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde
github.com/golangci/ineffassign
# github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1

@@ -192,20 +188,50 @@ golang.org/x/sys/windows
golang.org/x/text/width
golang.org/x/text/transform
golang.org/x/text/unicode/norm
# golang.org/x/tools v0.0.0-20190314010720-1286b2016bb1
golang.org/x/tools/go/packages
# golang.org/x/tools v0.0.0-20190314010720-f0bfdbff1f9c
golang.org/x/tools/go/analysis
golang.org/x/tools/go/analysis/passes/asmdecl
golang.org/x/tools/go/analysis/passes/assign
golang.org/x/tools/go/analysis/passes/atomic
golang.org/x/tools/go/analysis/passes/atomicalign
golang.org/x/tools/go/analysis/passes/bools
golang.org/x/tools/go/analysis/passes/buildtag
golang.org/x/tools/go/analysis/passes/cgocall
golang.org/x/tools/go/analysis/passes/composite
golang.org/x/tools/go/analysis/passes/copylock
golang.org/x/tools/go/analysis/passes/httpresponse
golang.org/x/tools/go/analysis/passes/loopclosure
golang.org/x/tools/go/analysis/passes/lostcancel
golang.org/x/tools/go/analysis/passes/nilfunc
golang.org/x/tools/go/analysis/passes/printf
golang.org/x/tools/go/analysis/passes/shadow
golang.org/x/tools/go/analysis/passes/shift
golang.org/x/tools/go/analysis/passes/stdmethods
golang.org/x/tools/go/analysis/passes/structtag
golang.org/x/tools/go/analysis/passes/tests
golang.org/x/tools/go/analysis/passes/unmarshal
golang.org/x/tools/go/analysis/passes/unreachable
golang.org/x/tools/go/analysis/passes/unsafeptr
golang.org/x/tools/go/analysis/passes/unusedresult
golang.org/x/tools/go/loader
golang.org/x/tools/go/packages
golang.org/x/tools/imports
golang.org/x/tools/go/analysis/passes/buildssa
golang.org/x/tools/go/ssa
golang.org/x/tools/go/ssa/ssautil
golang.org/x/tools/go/ast/astutil
golang.org/x/tools/go/types/typeutil
golang.org/x/tools/go/gcexportdata
golang.org/x/tools/go/analysis/passes/internal/analysisutil
golang.org/x/tools/go/analysis/passes/inspect
golang.org/x/tools/go/ast/inspector
golang.org/x/tools/go/analysis/passes/ctrlflow
golang.org/x/tools/go/cfg
golang.org/x/tools/go/buildutil
golang.org/x/tools/go/internal/cgo
golang.org/x/tools/go/internal/packagesdriver
golang.org/x/tools/internal/gopathwalk
golang.org/x/tools/internal/semver
golang.org/x/tools/go/ast/astutil
golang.org/x/tools/go/types/typeutil
golang.org/x/tools/go/buildutil
golang.org/x/tools/internal/module
golang.org/x/tools/go/internal/gcimporter
golang.org/x/tools/internal/fastwalk