mirror of
https://github.com/scratchfoundation/golangci-lint.git
synced 2025-07-28 23:19:57 -04:00
Update megacheck to the latest version
Also make the following improvements:

- show the proper sublinter name for megacheck sublinters
- refactor megacheck merging/optimizing to make it simpler and more robust
- improve handling of unknown linter names in //nolint directives
- minimize the diff of our megacheck version from upstream; https://github.com/golang/go/issues/29612 blocks using the upstream version directly
- support the new `stylecheck` linter
- improve test coverage for megacheck- and nolint-related cases
- update and use the upstream versions of unparam and interfacer instead of the forked ones
- stop using the golangci/tools repo
- fix newly found issues after updating the linters

It should also be noted that megacheck works much faster and consumes less memory in its newest release, so golangci-lint becomes noticeably faster and uses less memory on large repos.

Relates: #314
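Below is a minimal, self-contained Go sketch of the merging/optimizing behaviour described above. The names echo constants introduced in this diff (`MegacheckStaticcheckName` and friends), but the snippet is illustrative only and is not the project's actual API: individually enabled megacheck children get folded into a single megacheck run, while a lone child keeps its own name so issues are reported under the proper sublinter.

```go
package main

import (
	"fmt"
	"sort"
)

// Child linter names handled by the megacheck metalinter; they mirror the
// constants added in this commit.
const (
	staticcheckName = "staticcheck"
	gosimpleName    = "gosimple"
	unusedName      = "unused"
	stylecheckName  = "stylecheck" // new; not in megacheck's defaults for compatibility
)

// defaultChildren lists the sublinters implied by enabling "megacheck".
func defaultChildren() []string {
	return []string{staticcheckName, gosimpleName, unusedName}
}

// optimize folds enabled megacheck children back into one "megacheck" run so
// they share a single SSA build; zero or one enabled child is left as-is.
func optimize(enabled map[string]bool) []string {
	var children []string
	for _, name := range append(defaultChildren(), stylecheckName) {
		if enabled[name] {
			children = append(children, name)
			delete(enabled, name)
		}
	}

	var result []string
	for name := range enabled {
		result = append(result, name)
	}
	if len(children) > 1 {
		result = append(result, "megacheck") // merged run of all enabled children
	} else {
		result = append(result, children...) // nothing to merge
	}
	sort.Strings(result)
	return result
}

func main() {
	fmt.Println(optimize(map[string]bool{"staticcheck": true, "gosimple": true, "govet": true}))
	// prints: [govet megacheck]
}
```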
This commit is contained in:
parent
93b2d10537
commit
7705f82591
150 changed files with 17847 additions and 2006 deletions
README.md, go.mod, go.sum
pkg
test
enabled_linters_test.go
testdata
vendor/github.com
BurntSushi/toml
.gitignore, .travis.yml, COMPATIBLE, COPYING, Makefile, README.md, decode.go, decode_meta.go, doc.go, encode.go, encoding_types.go, encoding_types_1.1.go, lex.go, parse.go, session.vim, type_check.go, type_fields.go
golangci/go-tools
arg
callgraph
config
functions
internal/sharedcheck
lint
simple
ssa
LICENSE, blockopt.go, builder.go, const.go, create.go, doc.go, dom.go, emit.go, func.go, identical.go, identical_17.go, lift.go, lvalue.go, methods.go, mode.go, print.go, sanity.go, source.go, ssa.go
ssautil
testmain.go, util.go, wrappers.go, write.go, ssautil
staticcheck
@@ -166,7 +166,7 @@ GolangCI-Lint can be used with zero configuration. By default the following lint
 ```bash
 $ golangci-lint help linters
 Enabled by default linters:
-govet: Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string [fast: true]
+govet (vet, vetshadow): Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string [fast: true]
 errcheck: Errcheck is a program for checking for unchecked errors in go programs. These unchecked errors can be critical bugs in some cases [fast: true]
 staticcheck: Staticcheck is a go vet on steroids, applying a ton of static analysis checks [fast: false]
 unused: Checks Go code for unused constants, variables, functions and types [fast: false]
@@ -185,6 +185,7 @@ $ golangci-lint help linters
 ...
 Disabled by default linters:
 golint: Golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes [fast: true]
+stylecheck: Stylecheck is a replacement for golint [fast: false]
 gosec (gas): Inspects source code for security problems [fast: true]
 interfacer: Linter that suggests narrower interface types [fast: false]
 unconvert: Remove unnecessary type conversions [fast: true]
@@ -194,7 +195,6 @@ gocyclo: Computes and checks the cyclomatic complexity of functions [fast: true]
 gofmt: Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification [fast: true]
 goimports: Goimports does everything that gofmt does. Additionally it checks unused imports [fast: true]
 maligned: Tool to detect Go structs that would take less memory if their fields were sorted [fast: true]
-megacheck: 3 sub-linters in one: unused, gosimple and staticcheck [fast: false]
 depguard: Go linter that checks if package imports are in a list of acceptable packages [fast: true]
 misspell: Finds commonly misspelled English words in comments [fast: true]
 lll: Reports long lines [fast: true]
@@ -388,6 +388,7 @@ golangci-lint help linters
 ### Disabled By Default Linters (`-E/--enable`)

 - [golint](https://github.com/golang/lint) - Golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes
+- [stylecheck](https://github.com/dominikh/go-tools/tree/master/stylecheck) - Stylecheck is a replacement for golint
 - [gosec](https://github.com/securego/gosec) - Inspects source code for security problems
 - [interfacer](https://github.com/mvdan/interfacer) - Linter that suggests narrower interface types
 - [unconvert](https://github.com/mdempsky/unconvert) - Remove unnecessary type conversions
@@ -397,7 +398,6 @@ golangci-lint help linters
 - [gofmt](https://golang.org/cmd/gofmt/) - Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification
 - [goimports](https://godoc.org/golang.org/x/tools/cmd/goimports) - Goimports does everything that gofmt does. Additionally it checks unused imports
 - [maligned](https://github.com/mdempsky/maligned) - Tool to detect Go structs that would take less memory if their fields were sorted
-- [megacheck](https://github.com/dominikh/go-tools/tree/master/cmd/megacheck) - 3 sub-linters in one: unused, gosimple and staticcheck
 - [depguard](https://github.com/OpenPeeDeeP/depguard) - Go linter that checks if package imports are in a list of acceptable packages
 - [misspell](https://github.com/client9/misspell) - Finds commonly misspelled English words in comments
 - [lll](https://github.com/walle/lll) - Reports long lines
@@ -461,7 +461,7 @@ Flags:
 # govet: Common false positives
 - (possible misuse of unsafe.Pointer|should have signature)

-# megacheck: Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore
+# staticcheck: Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore
 - ineffective break statement. Did you mean to break out of the outer loop

 # gosec: Too many false-positives on 'unsafe' usage
|
9
go.mod
9
go.mod
|
@ -21,23 +21,19 @@ require (
|
|||
github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a
|
||||
github.com/golangci/errcheck v0.0.0-20181003203344-ef45e06d44b6
|
||||
github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613
|
||||
github.com/golangci/go-tools v0.0.0-20180902103155-93eecd106a0b
|
||||
github.com/golangci/go-tools v0.0.0-20180109140146-be4842e24b95
|
||||
github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3
|
||||
github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee
|
||||
github.com/golangci/gofmt v0.0.0-20181105071733-0b8337e80d98
|
||||
github.com/golangci/gosec v0.0.0-20180901114220-8afd9cbb6cfb
|
||||
github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190
|
||||
github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde
|
||||
github.com/golangci/interfacer v0.0.0-20180902080945-01958817a6ec
|
||||
github.com/golangci/lint v0.0.0-20180902080404-c2187e7932b5 // indirect
|
||||
github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1
|
||||
github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca
|
||||
github.com/golangci/misspell v0.0.0-20180809174111-950f5d19e770
|
||||
github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21
|
||||
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0
|
||||
github.com/golangci/tools v0.0.0-20180902102414-2cefd77fef9b
|
||||
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4
|
||||
github.com/golangci/unparam v0.0.0-20180902112548-7ad9dbcccc16
|
||||
github.com/hashicorp/hcl v0.0.0-20180404174102-ef8a98b0bbce // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
||||
github.com/magiconair/properties v1.7.6 // indirect
|
||||
|
@ -68,6 +64,9 @@ require (
|
|||
gopkg.in/airbrake/gobrake.v2 v2.0.9 // indirect
|
||||
gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2 // indirect
|
||||
gopkg.in/yaml.v2 v2.2.1
|
||||
mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed
|
||||
mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b // indirect
|
||||
mvdan.cc/unparam v0.0.0-20181201214637-68701730a1d7
|
||||
sourcegraph.com/sourcegraph/go-diff v0.0.0-20171119081133-3f415a150aec
|
||||
sourcegraph.com/sqs/pbtypes v0.0.0-20160107090929-4d1b9dc7ffc3 // indirect
|
||||
)
|
||||
|
|
21
go.sum
21
go.sum
|
@ -49,8 +49,8 @@ github.com/golangci/errcheck v0.0.0-20181003203344-ef45e06d44b6 h1:i2jIkQFb8RG45
|
|||
github.com/golangci/errcheck v0.0.0-20181003203344-ef45e06d44b6/go.mod h1:DbHgvLiFKX1Sh2T1w8Q/h4NAI8MHIpzCdnBUDTXU3I0=
|
||||
github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 h1:WadunOE/TeHR8U7f0TXiJACHeU3cuFOXuKafw4rozqU=
|
||||
github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8=
|
||||
github.com/golangci/go-tools v0.0.0-20180902103155-93eecd106a0b h1:FSrt9JBK7JINu5UobyIF6epfpjL66H+67KZoTbE0zwk=
|
||||
github.com/golangci/go-tools v0.0.0-20180902103155-93eecd106a0b/go.mod h1:unzUULGw35sjyOYjUt0jMTXqHlZPpPc6e+xfO4cd6mM=
|
||||
github.com/golangci/go-tools v0.0.0-20180109140146-be4842e24b95 h1:msnLojqdJ37Bszm2D+srIkHJZvTKPFQWuyJAeMU6Ilo=
|
||||
github.com/golangci/go-tools v0.0.0-20180109140146-be4842e24b95/go.mod h1:unzUULGw35sjyOYjUt0jMTXqHlZPpPc6e+xfO4cd6mM=
|
||||
github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3 h1:pe9JHs3cHHDQgOFXJJdYkK6fLz2PWyYtP4hthoCMvs8=
|
||||
github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3/go.mod h1:JXrF4TWy4tXYn62/9x8Wm/K/dm06p8tCKwFRDPZG/1o=
|
||||
github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee h1:J2XAy40+7yz70uaOiMbNnluTg7gyQhtGqLQncQh+4J8=
|
||||
|
@ -63,11 +63,6 @@ github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190 h1:SLIgprnxQNjBpkz5
|
|||
github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190/go.mod h1:pPwb+AK755h3/r73avHz5bEN6sa51/2HEZlLaV53hCo=
|
||||
github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde h1:qEGp3ZF1Qw6TkbWKn6GdJ12Ssu/CpJBaBcJ4hrUjrSo=
|
||||
github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde/go.mod h1:e5tpTHCfVze+7EpLEozzMB3eafxo2KT5veNg1k6byQU=
|
||||
github.com/golangci/interfacer v0.0.0-20180902080945-01958817a6ec h1:rvg392QhtAd/yOevPRX4wzYMIDYyq99j9MV+Mb1uVHs=
|
||||
github.com/golangci/interfacer v0.0.0-20180902080945-01958817a6ec/go.mod h1:yBorupihJ5OYDFE7/EZwrslyNyZaaidqqVptYTcNxnk=
|
||||
github.com/golangci/lint v0.0.0-20170908181259-c2187e7932b5/go.mod h1:zs8jPuoOp76KrjiydDqO3CGeS4v9gq77HNNiYcxxTGw=
|
||||
github.com/golangci/lint v0.0.0-20180902080404-c2187e7932b5 h1:9NYm50bkzER4RayDaggNjxF5kesUJREASyFgk4AcIis=
|
||||
github.com/golangci/lint v0.0.0-20180902080404-c2187e7932b5/go.mod h1:zs8jPuoOp76KrjiydDqO3CGeS4v9gq77HNNiYcxxTGw=
|
||||
github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1 h1:PHK2kIh21Zt4IcG0bBRzQwEDVKF64LnkoSXnm8lfJUk=
|
||||
github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y=
|
||||
github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA=
|
||||
|
@ -78,12 +73,8 @@ github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21 h1:leSNB7iYzLYSS
|
|||
github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI=
|
||||
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0 h1:HVfrLniijszjS1aiNg8JbBMO2+E1WIQ+j/gL4SQqGPg=
|
||||
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4=
|
||||
github.com/golangci/tools v0.0.0-20180902102414-2cefd77fef9b h1:3hI7NZ9D3edEBVbN6V1urHWbFKJfcIlOFvX5m10jB88=
|
||||
github.com/golangci/tools v0.0.0-20180902102414-2cefd77fef9b/go.mod h1:zgj6NOYXOC1cexsdtDceI4/mj3aXK4JOVg9AV3C5LWI=
|
||||
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys=
|
||||
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ=
|
||||
github.com/golangci/unparam v0.0.0-20180902112548-7ad9dbcccc16 h1:QURX/XMP2uJUzzEvfJ291v1snmbJuyznAJLSQVnPyko=
|
||||
github.com/golangci/unparam v0.0.0-20180902112548-7ad9dbcccc16/go.mod h1:KW2L33j82vo0S0U6RP6uUQSuat+0Q457Yf+1mXC98/M=
|
||||
github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/hashicorp/hcl v0.0.0-20180404174102-ef8a98b0bbce h1:xdsDDbiBDQTKASoGEZ+pEmF1OnWuu8AQ9I8iNbHNeno=
|
||||
|
@ -154,8 +145,8 @@ golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5h
|
|||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20180826000951-f6ba57429505/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20181201035826-d0ca3933b724/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20181220024903-92cdcd90bf52 h1:oOIe9Zzq27JsS/3ACpGF1HwWnWNflZWT/3EvM7mtcEk=
|
||||
golang.org/x/tools v0.0.0-20181220024903-92cdcd90bf52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
gopkg.in/airbrake/gobrake.v2 v2.0.9 h1:7z2uVWwn7oVeeugY1DtlPAy5H+KYgB1KeKTnqjNatLo=
|
||||
|
@ -170,6 +161,12 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkep
|
|||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||
gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
|
||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I=
|
||||
mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc=
|
||||
mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo=
|
||||
mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4=
|
||||
mvdan.cc/unparam v0.0.0-20181201214637-68701730a1d7 h1:AHc3hAmhXTHwSA40I4CJW2w2iirqwLzZuFgC6LdbtJk=
|
||||
mvdan.cc/unparam v0.0.0-20181201214637-68701730a1d7/go.mod h1:N4YHaPPCFjRU1vNrla2C9eoyqLxMaHmrsI8Th4iuQMY=
|
||||
sourcegraph.com/sourcegraph/go-diff v0.0.0-20171119081133-3f415a150aec h1:wAAdENPXC7bE1oxY4VqSDdhaA+XQ8TgQHsZMMnrXjEk=
|
||||
sourcegraph.com/sourcegraph/go-diff v0.0.0-20171119081133-3f415a150aec/go.mod h1:R09mWeb9JcPbO+A3cYDc11xjz0wp6r9+KnqdqROAoRU=
|
||||
sourcegraph.com/sqs/pbtypes v0.0.0-20160107090929-4d1b9dc7ffc3 h1:hXy8YsgVLDz5mlngKhNHQhAsAGrSp3dlXZN4b0/4UUI=
|
||||
|
|
|
@ -17,6 +17,9 @@ func (e *Executor) initConfig() {
|
|||
Use: "config",
|
||||
Short: "Config",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if len(args) != 0 {
|
||||
e.log.Fatalf("Usage: golangci-lint config")
|
||||
}
|
||||
if err := cmd.Help(); err != nil {
|
||||
e.log.Fatalf("Can't run help: %s", err)
|
||||
}
|
||||
|
@ -34,7 +37,11 @@ func (e *Executor) initConfig() {
|
|||
|
||||
}
|
||||
|
||||
func (e *Executor) executePathCmd(cmd *cobra.Command, args []string) {
|
||||
func (e *Executor) executePathCmd(_ *cobra.Command, args []string) {
|
||||
if len(args) != 0 {
|
||||
e.log.Fatalf("Usage: golangci-lint config path")
|
||||
}
|
||||
|
||||
usedConfigFile := viper.ConfigFileUsed()
|
||||
if usedConfigFile == "" {
|
||||
e.log.Warnf("No config file detected")
|
||||
|
|
|
@ -17,6 +17,9 @@ func (e *Executor) initHelp() {
|
|||
Use: "help",
|
||||
Short: "Help",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if len(args) != 0 {
|
||||
e.log.Fatalf("Usage: golangci-lint help")
|
||||
}
|
||||
if err := cmd.Help(); err != nil {
|
||||
e.log.Fatalf("Can't run help: %s", err)
|
||||
}
|
||||
|
@ -43,7 +46,7 @@ func printLinterConfigs(lcs []*linter.Config) {
|
|||
}
|
||||
}
|
||||
|
||||
func (e *Executor) executeLintersHelp(cmd *cobra.Command, args []string) {
|
||||
func (e *Executor) executeLintersHelp(_ *cobra.Command, args []string) {
|
||||
if len(args) != 0 {
|
||||
e.log.Fatalf("Usage: golangci-lint help linters")
|
||||
}
|
||||
|
|
|
@ -30,7 +30,7 @@ func IsLinterInConfigsList(name string, linters []*linter.Config) bool {
|
|||
return false
|
||||
}
|
||||
|
||||
func (e *Executor) executeLinters(cmd *cobra.Command, args []string) {
|
||||
func (e *Executor) executeLinters(_ *cobra.Command, args []string) {
|
||||
if len(args) != 0 {
|
||||
e.log.Fatalf("Usage: golangci-lint linters")
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ import (
|
|||
"github.com/golangci/golangci-lint/pkg/logutils"
|
||||
)
|
||||
|
||||
func (e *Executor) persistentPreRun(cmd *cobra.Command, args []string) {
|
||||
func (e *Executor) persistentPreRun(_ *cobra.Command, _ []string) {
|
||||
if e.cfg.Run.PrintVersion {
|
||||
fmt.Fprintf(logutils.StdOut, "golangci-lint has version %s built from %s on %s\n", e.version, e.commit, e.date)
|
||||
os.Exit(0)
|
||||
|
@ -32,7 +32,7 @@ func (e *Executor) persistentPreRun(cmd *cobra.Command, args []string) {
|
|||
}
|
||||
}
|
||||
|
||||
func (e *Executor) persistentPostRun(cmd *cobra.Command, args []string) {
|
||||
func (e *Executor) persistentPostRun(_ *cobra.Command, _ []string) {
|
||||
if e.cfg.Run.CPUProfilePath != "" {
|
||||
pprof.StopCPUProfile()
|
||||
}
|
||||
|
@ -64,6 +64,9 @@ func (e *Executor) initRoot() {
|
|||
Short: "golangci-lint is a smart linters runner.",
|
||||
Long: `Smart, fast linters runner. Run it in cloud for every GitHub pull request on https://golangci.com`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if len(args) != 0 {
|
||||
e.log.Fatalf("Usage: golangci-lint")
|
||||
}
|
||||
if err := cmd.Help(); err != nil {
|
||||
e.log.Fatalf("Can't run help: %s", err)
|
||||
}
|
||||
|
|
|
@ -370,7 +370,7 @@ func (e *Executor) createPrinter() (printers.Printer, error) {
|
|||
return p, nil
|
||||
}
|
||||
|
||||
func (e *Executor) executeRun(cmd *cobra.Command, args []string) {
|
||||
func (e *Executor) executeRun(_ *cobra.Command, args []string) {
|
||||
needTrackResources := e.cfg.Run.IsVerbose || e.cfg.Run.PrintResourcesUsage
|
||||
trackResourcesEndCh := make(chan struct{})
|
||||
defer func() { // XXX: this defer must be before ctx.cancel defer
|
||||
|
|
|
@ -55,7 +55,7 @@ var DefaultExcludePatterns = []ExcludePattern{
|
|||
},
|
||||
{
|
||||
Pattern: "ineffective break statement. Did you mean to break out of the outer loop",
|
||||
Linter: "megacheck",
|
||||
Linter: "staticcheck",
|
||||
Why: "Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore",
|
||||
},
|
||||
{
|
||||
|
|
|
@ -3,7 +3,7 @@ package golinters
|
|||
import (
|
||||
"context"
|
||||
|
||||
"github.com/golangci/interfacer/check"
|
||||
"mvdan.cc/interfacer/check"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/lint/linter"
|
||||
"github.com/golangci/golangci-lint/pkg/result"
|
||||
|
|
|
@ -2,65 +2,181 @@ package golinters
|
|||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/golangci/go-tools/config"
|
||||
"github.com/golangci/go-tools/stylecheck"
|
||||
|
||||
"github.com/golangci/go-tools/lint"
|
||||
"github.com/golangci/go-tools/lint/lintutil"
|
||||
"github.com/golangci/go-tools/simple"
|
||||
"github.com/golangci/go-tools/staticcheck"
|
||||
"github.com/golangci/go-tools/unused"
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
"golang.org/x/tools/go/loader"
|
||||
"golang.org/x/tools/go/packages"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/fsutils"
|
||||
"github.com/golangci/golangci-lint/pkg/lint/linter"
|
||||
libpackages "github.com/golangci/golangci-lint/pkg/packages"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/lint/linter"
|
||||
"github.com/golangci/golangci-lint/pkg/result"
|
||||
)
|
||||
|
||||
const megacheckName = "megacheck"
|
||||
const (
|
||||
MegacheckParentName = "megacheck"
|
||||
MegacheckStaticcheckName = "staticcheck"
|
||||
MegacheckUnusedName = "unused"
|
||||
MegacheckGosimpleName = "gosimple"
|
||||
MegacheckStylecheckName = "stylecheck"
|
||||
)
|
||||
|
||||
type Megacheck struct {
|
||||
UnusedEnabled bool
|
||||
GosimpleEnabled bool
|
||||
StaticcheckEnabled bool
|
||||
type Staticcheck struct {
|
||||
megacheck
|
||||
}
|
||||
|
||||
func (m Megacheck) Name() string {
|
||||
names := []string{}
|
||||
if m.UnusedEnabled {
|
||||
names = append(names, "unused")
|
||||
func NewStaticcheck() *Staticcheck {
|
||||
return &Staticcheck{
|
||||
megacheck: megacheck{
|
||||
staticcheckEnabled: true,
|
||||
},
|
||||
}
|
||||
if m.GosimpleEnabled {
|
||||
names = append(names, "gosimple")
|
||||
}
|
||||
if m.StaticcheckEnabled {
|
||||
names = append(names, "staticcheck")
|
||||
}
|
||||
|
||||
if len(names) == 1 {
|
||||
return names[0] // only one sublinter is enabled
|
||||
}
|
||||
|
||||
if len(names) == 3 {
|
||||
return megacheckName // all enabled
|
||||
}
|
||||
|
||||
return fmt.Sprintf("megacheck.{%s}", strings.Join(names, ","))
|
||||
}
|
||||
|
||||
func (m Megacheck) Desc() string {
|
||||
descs := map[string]string{
|
||||
"unused": "Checks Go code for unused constants, variables, functions and types",
|
||||
"gosimple": "Linter for Go source code that specializes in simplifying a code",
|
||||
"staticcheck": "Staticcheck is a go vet on steroids, applying a ton of static analysis checks",
|
||||
"megacheck": "3 sub-linters in one: unused, gosimple and staticcheck",
|
||||
func (Staticcheck) Name() string { return MegacheckStaticcheckName }
|
||||
func (Staticcheck) Desc() string {
|
||||
return "Staticcheck is a go vet on steroids, applying a ton of static analysis checks"
|
||||
}
|
||||
|
||||
type Gosimple struct {
|
||||
megacheck
|
||||
}
|
||||
|
||||
func NewGosimple() *Gosimple {
|
||||
return &Gosimple{
|
||||
megacheck: megacheck{
|
||||
gosimpleEnabled: true,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (Gosimple) Name() string { return MegacheckGosimpleName }
|
||||
func (Gosimple) Desc() string {
|
||||
return "Linter for Go source code that specializes in simplifying a code"
|
||||
}
|
||||
|
||||
type Unused struct {
|
||||
megacheck
|
||||
}
|
||||
|
||||
func NewUnused() *Unused {
|
||||
return &Unused{
|
||||
megacheck: megacheck{
|
||||
unusedEnabled: true,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (Unused) Name() string { return MegacheckUnusedName }
|
||||
func (Unused) Desc() string {
|
||||
return "Checks Go code for unused constants, variables, functions and types"
|
||||
}
|
||||
|
||||
type Stylecheck struct {
|
||||
megacheck
|
||||
}
|
||||
|
||||
func NewStylecheck() *Stylecheck {
|
||||
return &Stylecheck{
|
||||
megacheck: megacheck{
|
||||
stylecheckEnabled: true,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (Stylecheck) Name() string { return MegacheckStylecheckName }
|
||||
func (Stylecheck) Desc() string { return "Stylecheck is a replacement for golint" }
|
||||
|
||||
type megacheck struct {
|
||||
unusedEnabled bool
|
||||
gosimpleEnabled bool
|
||||
staticcheckEnabled bool
|
||||
stylecheckEnabled bool
|
||||
}
|
||||
|
||||
func (megacheck) Name() string {
|
||||
return MegacheckParentName
|
||||
}
|
||||
|
||||
func (megacheck) Desc() string {
|
||||
return "" // shouldn't be called
|
||||
}
|
||||
|
||||
func (m *megacheck) enableChildLinter(name string) error {
|
||||
switch name {
|
||||
case MegacheckStaticcheckName:
|
||||
m.staticcheckEnabled = true
|
||||
case MegacheckGosimpleName:
|
||||
m.gosimpleEnabled = true
|
||||
case MegacheckUnusedName:
|
||||
m.unusedEnabled = true
|
||||
case MegacheckStylecheckName:
|
||||
m.stylecheckEnabled = true
|
||||
default:
|
||||
return fmt.Errorf("invalid child linter name %s for metalinter %s", name, m.Name())
|
||||
}
|
||||
|
||||
return descs[m.Name()]
|
||||
return nil
|
||||
}
|
||||
|
||||
type MegacheckMetalinter struct{}
|
||||
|
||||
func (MegacheckMetalinter) Name() string {
|
||||
return MegacheckParentName
|
||||
}
|
||||
|
||||
func (MegacheckMetalinter) BuildLinterConfig(enabledChildren []string) (*linter.Config, error) {
|
||||
var m megacheck
|
||||
for _, name := range enabledChildren {
|
||||
if err := m.enableChildLinter(name); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: merge linter.Config and linter.Linter or refactor it in another way
|
||||
return &linter.Config{
|
||||
Linter: m,
|
||||
EnabledByDefault: false,
|
||||
NeedsTypeInfo: true,
|
||||
NeedsSSARepr: true,
|
||||
InPresets: []string{linter.PresetStyle, linter.PresetBugs, linter.PresetUnused},
|
||||
Speed: 1,
|
||||
AlternativeNames: nil,
|
||||
OriginalURL: "",
|
||||
ParentLinterName: "",
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (MegacheckMetalinter) DefaultChildLinterNames() []string {
|
||||
// no stylecheck here for backwards compatibility for users who enabled megacheck: don't enable extra
|
||||
// linter for them
|
||||
return []string{MegacheckStaticcheckName, MegacheckGosimpleName, MegacheckUnusedName}
|
||||
}
|
||||
|
||||
func (m MegacheckMetalinter) AllChildLinterNames() []string {
|
||||
return append(m.DefaultChildLinterNames(), MegacheckStylecheckName)
|
||||
}
|
||||
|
||||
func (m MegacheckMetalinter) isValidChild(name string) bool {
|
||||
for _, child := range m.AllChildLinterNames() {
|
||||
if child == name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func prettifyCompilationError(err packages.Error) error {
|
||||
|
@ -82,7 +198,7 @@ func prettifyCompilationError(err packages.Error) error {
|
|||
return errors.New(errText)
|
||||
}
|
||||
|
||||
func (m Megacheck) canAnalyze(lintCtx *linter.Context) bool {
|
||||
func (m megacheck) canAnalyze(lintCtx *linter.Context) bool {
|
||||
if len(lintCtx.NotCompilingPackages) == 0 {
|
||||
return true
|
||||
}
|
||||
|
@ -104,6 +220,7 @@ func (m Megacheck) canAnalyze(lintCtx *linter.Context) bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// TODO: print real linter names in this message
|
||||
warnText := fmt.Sprintf("Can't run megacheck because of compilation errors in packages %s", errPkgs)
|
||||
if len(errs) != 0 {
|
||||
warnText += fmt.Sprintf(": %s", prettifyCompilationError(errs[0]))
|
||||
|
@ -118,56 +235,117 @@ func (m Megacheck) canAnalyze(lintCtx *linter.Context) bool {
|
|||
return false
|
||||
}
|
||||
|
||||
func (m Megacheck) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) {
|
||||
func (m megacheck) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) {
|
||||
if !m.canAnalyze(lintCtx) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
issues := runMegacheck(lintCtx.Program, lintCtx.MegacheckSSAProgram, lintCtx.LoaderConfig,
|
||||
m.StaticcheckEnabled, m.GosimpleEnabled, m.UnusedEnabled, lintCtx.Settings().Unused.CheckExported)
|
||||
issues, err := m.runMegacheck(lintCtx.Packages, lintCtx.Settings().Unused.CheckExported)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to run megacheck")
|
||||
}
|
||||
|
||||
if len(issues) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
res := make([]result.Issue, 0, len(issues))
|
||||
meta := MegacheckMetalinter{}
|
||||
for _, i := range issues {
|
||||
if !meta.isValidChild(i.Checker) {
|
||||
lintCtx.Log.Warnf("Bad megacheck checker name %q", i.Checker)
|
||||
continue
|
||||
}
|
||||
|
||||
res = append(res, result.Issue{
|
||||
Pos: i.Position,
|
||||
Text: markIdentifiers(i.Text),
|
||||
FromLinter: m.Name(),
|
||||
FromLinter: i.Checker,
|
||||
})
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func runMegacheck(program *loader.Program, ssaProg *ssa.Program, conf *loader.Config,
|
||||
enableStaticcheck, enableGosimple, enableUnused, checkExportedUnused bool) []lint.Problem {
|
||||
func (m megacheck) runMegacheck(workingPkgs []*packages.Package, checkExportedUnused bool) ([]lint.Problem, error) {
|
||||
var checkers []lint.Checker
|
||||
|
||||
var checkers []lintutil.CheckerConfig
|
||||
|
||||
if enableStaticcheck {
|
||||
sac := staticcheck.NewChecker()
|
||||
checkers = append(checkers, lintutil.CheckerConfig{
|
||||
Checker: sac,
|
||||
})
|
||||
if m.gosimpleEnabled {
|
||||
checkers = append(checkers, simple.NewChecker())
|
||||
}
|
||||
|
||||
if enableGosimple {
|
||||
sc := simple.NewChecker()
|
||||
checkers = append(checkers, lintutil.CheckerConfig{
|
||||
Checker: sc,
|
||||
})
|
||||
if m.staticcheckEnabled {
|
||||
checkers = append(checkers, staticcheck.NewChecker())
|
||||
}
|
||||
|
||||
if enableUnused {
|
||||
if m.stylecheckEnabled {
|
||||
checkers = append(checkers, stylecheck.NewChecker())
|
||||
}
|
||||
if m.unusedEnabled {
|
||||
uc := unused.NewChecker(unused.CheckAll)
|
||||
uc.WholeProgram = checkExportedUnused
|
||||
uc.ConsiderReflection = true
|
||||
checkers = append(checkers, lintutil.CheckerConfig{
|
||||
Checker: unused.NewLintChecker(uc),
|
||||
})
|
||||
uc.WholeProgram = checkExportedUnused
|
||||
checkers = append(checkers, unused.NewLintChecker(uc))
|
||||
}
|
||||
|
||||
fs := lintutil.FlagSet(megacheckName)
|
||||
return lintutil.ProcessFlagSet(checkers, fs, program, ssaProg, conf)
|
||||
if len(checkers) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
cfg := config.Config{}
|
||||
opts := &lintutil.Options{
|
||||
// TODO: get current go version, but now it doesn't matter,
|
||||
// may be needed after next updates of megacheck
|
||||
GoVersion: 11,
|
||||
|
||||
Config: cfg,
|
||||
// TODO: support Ignores option
|
||||
}
|
||||
|
||||
return runMegacheckCheckers(checkers, opts, workingPkgs)
|
||||
}
|
||||
|
||||
// parseIgnore is a copy from megacheck code just to not fork megacheck
|
||||
func parseIgnore(s string) ([]lint.Ignore, error) {
|
||||
var out []lint.Ignore
|
||||
if len(s) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
for _, part := range strings.Fields(s) {
|
||||
p := strings.Split(part, ":")
|
||||
if len(p) != 2 {
|
||||
return nil, errors.New("malformed ignore string")
|
||||
}
|
||||
path := p[0]
|
||||
checks := strings.Split(p[1], ",")
|
||||
out = append(out, &lint.GlobIgnore{Pattern: path, Checks: checks})
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func runMegacheckCheckers(cs []lint.Checker, opt *lintutil.Options, workingPkgs []*packages.Package) ([]lint.Problem, error) {
|
||||
stats := lint.PerfStats{
|
||||
CheckerInits: map[string]time.Duration{},
|
||||
}
|
||||
|
||||
ignores, err := parseIgnore(opt.Ignores)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var problems []lint.Problem
|
||||
if len(workingPkgs) == 0 {
|
||||
return problems, nil
|
||||
}
|
||||
|
||||
l := &lint.Linter{
|
||||
Checkers: cs,
|
||||
Ignores: ignores,
|
||||
GoVersion: opt.GoVersion,
|
||||
ReturnIgnored: opt.ReturnIgnored,
|
||||
Config: opt.Config,
|
||||
|
||||
MaxConcurrentJobs: opt.MaxConcurrentJobs,
|
||||
PrintStats: opt.PrintStats,
|
||||
}
|
||||
problems = append(problems, l.Lint(workingPkgs, &stats)...)
|
||||
|
||||
return problems, nil
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ package golinters
|
|||
import (
|
||||
"context"
|
||||
|
||||
"github.com/golangci/unparam/check"
|
||||
"mvdan.cc/unparam/check"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/lint/linter"
|
||||
"github.com/golangci/golangci-lint/pkg/result"
|
||||
|
@ -25,7 +25,7 @@ func (lint Unparam) Run(ctx context.Context, lintCtx *linter.Context) ([]result.
|
|||
c := &check.Checker{}
|
||||
c.CallgraphAlgorithm(us.Algo)
|
||||
c.CheckExportedFuncs(us.CheckExported)
|
||||
c.Program(lintCtx.Program)
|
||||
c.Packages(lintCtx.Packages)
|
||||
c.ProgramSSA(lintCtx.SSAProgram)
|
||||
|
||||
unparamIssues, err := c.Check()
|
||||
|
|
|
@ -61,6 +61,9 @@ var replacePatterns = []replacePattern{
|
|||
"Blacklisted import `${1}`: weak cryptographic primitive"},
|
||||
{`^TLS InsecureSkipVerify set true.$`, "TLS `InsecureSkipVerify` set true."},
|
||||
|
||||
// gosimple
|
||||
{`^should replace loop with (.*)$`, "should replace loop with `${1}`"},
|
||||
|
||||
// megacheck
|
||||
{`^this value of (\S+) is never used$`, "this value of `${1}` is never used"},
|
||||
{`^should use time.Since instead of time.Now().Sub$`,
|
||||
|
|
|
@ -20,7 +20,8 @@ type Config struct {
|
|||
Speed int // more value means faster execution of linter
|
||||
AlternativeNames []string
|
||||
|
||||
OriginalURL string // URL of original (not forked) repo, needed for autogenerated README
|
||||
OriginalURL string // URL of original (not forked) repo, needed for autogenerated README
|
||||
ParentLinterName string // used only for megacheck's children now
|
||||
}
|
||||
|
||||
func (lc *Config) WithTypeInfo() *Config {
|
||||
|
@ -54,6 +55,11 @@ func (lc *Config) WithAlternativeNames(names ...string) *Config {
|
|||
return lc
|
||||
}
|
||||
|
||||
func (lc *Config) WithParent(parentLinterName string) *Config {
|
||||
lc.ParentLinterName = parentLinterName
|
||||
return lc
|
||||
}
|
||||
|
||||
func (lc *Config) GetSpeed() int {
|
||||
return lc.Speed
|
||||
}
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
package linter
|
||||
|
||||
import (
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
"golang.org/x/tools/go/loader"
|
||||
"golang.org/x/tools/go/packages"
|
||||
"golang.org/x/tools/go/ssa"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/config"
|
||||
"github.com/golangci/golangci-lint/pkg/lint/astcache"
|
||||
|
@ -17,8 +17,7 @@ type Context struct {
|
|||
LoaderConfig *loader.Config // deprecated, don't use for new linters
|
||||
Program *loader.Program // deprecated, use Packages for new linters
|
||||
|
||||
SSAProgram *ssa.Program // for unparam and interfacer: they don't change it
|
||||
MegacheckSSAProgram *ssa.Program // for megacheck: it modifies ssa program
|
||||
SSAProgram *ssa.Program // for unparam and interfacer but not for megacheck (it changes it)
|
||||
|
||||
Cfg *config.Config
|
||||
ASTCache *astcache.Cache
|
||||
|
|
8
pkg/lint/linter/metalinter.go
Normal file
8
pkg/lint/linter/metalinter.go
Normal file
|
@ -0,0 +1,8 @@
|
|||
package linter
|
||||
|
||||
type MetaLinter interface {
|
||||
Name() string
|
||||
BuildLinterConfig(enabledChildren []string) (*Config, error)
|
||||
AllChildLinterNames() []string
|
||||
DefaultChildLinterNames() []string
|
||||
}
|
|
@ -4,7 +4,6 @@ import (
|
|||
"sort"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/config"
|
||||
"github.com/golangci/golangci-lint/pkg/golinters"
|
||||
"github.com/golangci/golangci-lint/pkg/lint/linter"
|
||||
"github.com/golangci/golangci-lint/pkg/logutils"
|
||||
)
|
||||
|
@ -58,17 +57,29 @@ func (es EnabledSet) build(lcfg *config.Linters, enabledByDefaultLinters []*lint
|
|||
}
|
||||
}
|
||||
|
||||
metaLinters := es.m.GetMetaLinters()
|
||||
|
||||
for _, name := range lcfg.Enable {
|
||||
if metaLinter := metaLinters[name]; metaLinter != nil {
|
||||
// e.g. if we use --enable=megacheck we should add staticcheck,unused and gosimple to result set
|
||||
for _, childLinter := range metaLinter.DefaultChildLinterNames() {
|
||||
resultLintersSet[childLinter] = es.m.GetLinterConfig(childLinter)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
lc := es.m.GetLinterConfig(name)
|
||||
// it's important to use lc.Name(), not name, because name can be an alias
|
||||
resultLintersSet[lc.Name()] = lc
|
||||
}
|
||||
|
||||
for _, name := range lcfg.Disable {
|
||||
if name == "megacheck" {
|
||||
for _, ln := range getAllMegacheckSubLinterNames() {
|
||||
delete(resultLintersSet, ln)
|
||||
if metaLinter := metaLinters[name]; metaLinter != nil {
|
||||
// e.g. if we use --disable=megacheck we should remove staticcheck,unused and gosimple from result set
|
||||
for _, childLinter := range metaLinter.DefaultChildLinterNames() {
|
||||
delete(resultLintersSet, childLinter)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
lc := es.m.GetLinterConfig(name)
|
||||
|
@ -76,49 +87,32 @@ func (es EnabledSet) build(lcfg *config.Linters, enabledByDefaultLinters []*lint
|
|||
delete(resultLintersSet, lc.Name())
|
||||
}
|
||||
|
||||
es.optimizeLintersSet(resultLintersSet)
|
||||
return resultLintersSet
|
||||
}
|
||||
|
||||
func getAllMegacheckSubLinterNames() []string {
|
||||
unusedName := golinters.Megacheck{UnusedEnabled: true}.Name()
|
||||
gosimpleName := golinters.Megacheck{GosimpleEnabled: true}.Name()
|
||||
staticcheckName := golinters.Megacheck{StaticcheckEnabled: true}.Name()
|
||||
return []string{unusedName, gosimpleName, staticcheckName}
|
||||
}
|
||||
|
||||
func (es EnabledSet) optimizeLintersSet(linters map[string]*linter.Config) {
|
||||
unusedName := golinters.Megacheck{UnusedEnabled: true}.Name()
|
||||
gosimpleName := golinters.Megacheck{GosimpleEnabled: true}.Name()
|
||||
staticcheckName := golinters.Megacheck{StaticcheckEnabled: true}.Name()
|
||||
fullName := golinters.Megacheck{GosimpleEnabled: true, UnusedEnabled: true, StaticcheckEnabled: true}.Name()
|
||||
allNames := []string{unusedName, gosimpleName, staticcheckName, fullName}
|
||||
|
||||
megacheckCount := 0
|
||||
for _, n := range allNames {
|
||||
if linters[n] != nil {
|
||||
megacheckCount++
|
||||
for _, metaLinter := range es.m.GetMetaLinters() {
|
||||
var children []string
|
||||
for _, child := range metaLinter.AllChildLinterNames() {
|
||||
if _, ok := linters[child]; ok {
|
||||
children = append(children, child)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if megacheckCount <= 1 {
|
||||
return
|
||||
}
|
||||
if len(children) <= 1 {
|
||||
continue
|
||||
}
|
||||
|
||||
isFullEnabled := linters[fullName] != nil
|
||||
mega := golinters.Megacheck{
|
||||
UnusedEnabled: isFullEnabled || linters[unusedName] != nil,
|
||||
GosimpleEnabled: isFullEnabled || linters[gosimpleName] != nil,
|
||||
StaticcheckEnabled: isFullEnabled || linters[staticcheckName] != nil,
|
||||
for _, child := range children {
|
||||
delete(linters, child)
|
||||
}
|
||||
builtLinterConfig, err := metaLinter.BuildLinterConfig(children)
|
||||
if err != nil {
|
||||
panic("shouldn't fail during linter building: " + err.Error())
|
||||
}
|
||||
linters[metaLinter.Name()] = builtLinterConfig
|
||||
es.log.Infof("Optimized sublinters %s into metalinter %s", children, metaLinter.Name())
|
||||
}
|
||||
|
||||
for _, n := range allNames {
|
||||
delete(linters, n)
|
||||
}
|
||||
|
||||
lc := *es.m.GetLinterConfig("megacheck")
|
||||
lc.Linter = mega
|
||||
linters[mega.Name()] = &lc
|
||||
}
|
||||
|
||||
func (es EnabledSet) Get() ([]*linter.Config, error) {
|
||||
|
@ -127,17 +121,18 @@ func (es EnabledSet) Get() ([]*linter.Config, error) {
|
|||
}
|
||||
|
||||
resultLintersSet := es.build(&es.cfg.Linters, es.m.GetAllEnabledByDefaultLinters())
|
||||
es.verbosePrintLintersStatus(resultLintersSet)
|
||||
es.optimizeLintersSet(resultLintersSet)
|
||||
|
||||
var resultLinters []*linter.Config
|
||||
for _, lc := range resultLintersSet {
|
||||
resultLinters = append(resultLinters, lc)
|
||||
}
|
||||
|
||||
es.verbosePrintLintersStatus(resultLinters)
|
||||
return resultLinters, nil
|
||||
}
|
||||
|
||||
func (es EnabledSet) verbosePrintLintersStatus(lcs []*linter.Config) {
|
||||
func (es EnabledSet) verbosePrintLintersStatus(lcs map[string]*linter.Config) {
|
||||
var linterNames []string
|
||||
for _, lc := range lcs {
|
||||
linterNames = append(linterNames, lc.Name())
|
||||
|
|
|
@ -4,6 +4,8 @@ import (
|
|||
"sort"
|
||||
"testing"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/golinters"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/config"
|
||||
|
@ -20,23 +22,32 @@ func TestGetEnabledLintersSet(t *testing.T) {
|
|||
cases := []cs{
|
||||
{
|
||||
cfg: config.Linters{
|
||||
Disable: []string{"megacheck"},
|
||||
Disable: []string{golinters.MegacheckMetalinter{}.Name()},
|
||||
},
|
||||
name: "disable all linters from megacheck",
|
||||
def: getAllMegacheckSubLinterNames(),
|
||||
def: golinters.MegacheckMetalinter{}.DefaultChildLinterNames(),
|
||||
exp: nil, // all disabled
|
||||
},
|
||||
{
|
||||
cfg: config.Linters{
|
||||
Disable: []string{"staticcheck"},
|
||||
Disable: []string{golinters.MegacheckStaticcheckName},
|
||||
},
|
||||
name: "disable only staticcheck",
|
||||
def: getAllMegacheckSubLinterNames(),
|
||||
exp: []string{"megacheck.{unused,gosimple}"},
|
||||
def: golinters.MegacheckMetalinter{}.DefaultChildLinterNames(),
|
||||
exp: []string{golinters.MegacheckGosimpleName, golinters.MegacheckUnusedName},
|
||||
},
|
||||
{
|
||||
name: "merge into megacheck",
|
||||
def: getAllMegacheckSubLinterNames(),
|
||||
exp: []string{"megacheck"},
|
||||
name: "don't merge into megacheck",
|
||||
def: golinters.MegacheckMetalinter{}.DefaultChildLinterNames(),
|
||||
exp: golinters.MegacheckMetalinter{}.DefaultChildLinterNames(),
|
||||
},
|
||||
{
|
||||
name: "expand megacheck",
|
||||
cfg: config.Linters{
|
||||
Enable: []string{golinters.MegacheckMetalinter{}.Name()},
|
||||
},
|
||||
def: nil,
|
||||
exp: golinters.MegacheckMetalinter{}.DefaultChildLinterNames(),
|
||||
},
|
||||
{
|
||||
name: "don't disable anything",
|
||||
|
@ -87,8 +98,11 @@ func TestGetEnabledLintersSet(t *testing.T) {
|
|||
t.Run(c.name, func(t *testing.T) {
|
||||
var defaultLinters []*linter.Config
|
||||
for _, ln := range c.def {
|
||||
defaultLinters = append(defaultLinters, m.GetLinterConfig(ln))
|
||||
lc := m.GetLinterConfig(ln)
|
||||
assert.NotNil(t, lc, ln)
|
||||
defaultLinters = append(defaultLinters, lc)
|
||||
}
|
||||
|
||||
els := es.build(&c.cfg, defaultLinters)
|
||||
var enabledLinters []string
|
||||
for ln, lc := range els {
|
||||
|
|
|
@ -37,6 +37,10 @@ func (m Manager) allPresetsSet() map[string]bool {
|
|||
return ret
|
||||
}
|
||||
|
||||
func (m Manager) GetMetaLinter(name string) linter.MetaLinter {
|
||||
return m.GetMetaLinters()[name]
|
||||
}
|
||||
|
||||
func (m Manager) GetLinterConfig(name string) *linter.Config {
|
||||
lc, ok := m.nameToLC[name]
|
||||
if !ok {
|
||||
|
@ -57,12 +61,26 @@ func enableLinterConfigs(lcs []*linter.Config, isEnabled func(lc *linter.Config)
|
|||
return ret
|
||||
}
|
||||
|
||||
func (Manager) GetMetaLinters() map[string]linter.MetaLinter {
|
||||
metaLinters := []linter.MetaLinter{
|
||||
golinters.MegacheckMetalinter{},
|
||||
}
|
||||
|
||||
ret := map[string]linter.MetaLinter{}
|
||||
for _, metaLinter := range metaLinters {
|
||||
ret[metaLinter.Name()] = metaLinter
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (Manager) GetAllSupportedLinterConfigs() []*linter.Config {
|
||||
lcs := []*linter.Config{
|
||||
linter.NewConfig(golinters.Govet{}).
|
||||
WithTypeInfo().
|
||||
WithPresets(linter.PresetBugs).
|
||||
WithSpeed(4).
|
||||
WithAlternativeNames("vet", "vetshadow").
|
||||
WithURL("https://golang.org/cmd/vet/"),
|
||||
linter.NewConfig(golinters.Errcheck{}).
|
||||
WithTypeInfo().
|
||||
|
@ -74,21 +92,26 @@ func (Manager) GetAllSupportedLinterConfigs() []*linter.Config {
|
|||
WithSpeed(3).
|
||||
WithURL("https://github.com/golang/lint"),
|
||||
|
||||
linter.NewConfig(golinters.Megacheck{StaticcheckEnabled: true}).
|
||||
linter.NewConfig(golinters.NewStaticcheck()).
|
||||
WithSSA().
|
||||
WithPresets(linter.PresetBugs).
|
||||
WithSpeed(2).
|
||||
WithURL("https://staticcheck.io/"),
|
||||
linter.NewConfig(golinters.Megacheck{UnusedEnabled: true}).
|
||||
linter.NewConfig(golinters.NewUnused()).
|
||||
WithSSA().
|
||||
WithPresets(linter.PresetUnused).
|
||||
WithSpeed(5).
|
||||
WithURL("https://github.com/dominikh/go-tools/tree/master/cmd/unused"),
|
||||
linter.NewConfig(golinters.Megacheck{GosimpleEnabled: true}).
|
||||
linter.NewConfig(golinters.NewGosimple()).
|
||||
WithSSA().
|
||||
WithPresets(linter.PresetStyle).
|
||||
WithSpeed(5).
|
||||
WithURL("https://github.com/dominikh/go-tools/tree/master/cmd/gosimple"),
|
||||
linter.NewConfig(golinters.NewStylecheck()).
|
||||
WithSSA().
|
||||
WithPresets(linter.PresetStyle).
|
||||
WithSpeed(5).
|
||||
WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"),
|
||||
|
||||
linter.NewConfig(golinters.Gosec{}).
|
||||
WithTypeInfo().
|
||||
|
@ -156,11 +179,6 @@ func (Manager) GetAllSupportedLinterConfigs() []*linter.Config {
|
|||
WithPresets(linter.PresetPerformance).
|
||||
WithSpeed(10).
|
||||
WithURL("https://github.com/mdempsky/maligned"),
|
||||
linter.NewConfig(golinters.Megacheck{GosimpleEnabled: true, UnusedEnabled: true, StaticcheckEnabled: true}).
|
||||
WithSSA().
|
||||
WithPresets(linter.PresetStyle, linter.PresetBugs, linter.PresetUnused).
|
||||
WithSpeed(1).
|
||||
WithURL("https://github.com/dominikh/go-tools/tree/master/cmd/megacheck"),
|
||||
linter.NewConfig(golinters.Depguard{}).
|
||||
WithTypeInfo().
|
||||
WithPresets(linter.PresetStyle).
|
||||
|
@ -207,22 +225,22 @@ func (Manager) GetAllSupportedLinterConfigs() []*linter.Config {
|
|||
}
|
||||
|
||||
isLocalRun := os.Getenv("GOLANGCI_COM_RUN") == ""
|
||||
enabled := map[string]bool{
|
||||
golinters.Govet{}.Name(): true,
|
||||
golinters.Errcheck{}.Name(): true,
|
||||
golinters.Megacheck{StaticcheckEnabled: true}.Name(): true,
|
||||
golinters.Megacheck{UnusedEnabled: true}.Name(): true,
|
||||
golinters.Megacheck{GosimpleEnabled: true}.Name(): true,
|
||||
golinters.Structcheck{}.Name(): true,
|
||||
golinters.Varcheck{}.Name(): true,
|
||||
golinters.Ineffassign{}.Name(): true,
|
||||
golinters.Deadcode{}.Name(): true,
|
||||
enabledByDefault := map[string]bool{
|
||||
golinters.Govet{}.Name(): true,
|
||||
golinters.Errcheck{}.Name(): true,
|
||||
golinters.Staticcheck{}.Name(): true,
|
||||
golinters.Unused{}.Name(): true,
|
||||
golinters.Gosimple{}.Name(): true,
|
||||
golinters.Structcheck{}.Name(): true,
|
||||
golinters.Varcheck{}.Name(): true,
|
||||
golinters.Ineffassign{}.Name(): true,
|
||||
golinters.Deadcode{}.Name(): true,
|
||||
|
||||
// don't typecheck for golangci.com: too many troubles
|
||||
golinters.TypeCheck{}.Name(): isLocalRun,
|
||||
}
|
||||
return enableLinterConfigs(lcs, func(lc *linter.Config) bool {
|
||||
return enabled[lc.Name()]
|
||||
return enabledByDefault[lc.Name()]
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ func (v Validator) validateLintersNames(cfg *config.Linters) error {
|
|||
allNames := append([]string{}, cfg.Enable...)
|
||||
allNames = append(allNames, cfg.Disable...)
|
||||
for _, name := range allNames {
|
||||
if v.m.GetLinterConfig(name) == nil {
|
||||
if v.m.GetLinterConfig(name) == nil && v.m.GetMetaLinter(name) == nil {
|
||||
return fmt.Errorf("no such linter %q", name)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,11 +11,11 @@ import (
|
|||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
"github.com/golangci/tools/go/ssa/ssautil"
|
||||
"github.com/pkg/errors"
|
||||
"golang.org/x/tools/go/loader"
|
||||
"golang.org/x/tools/go/packages"
|
||||
"golang.org/x/tools/go/ssa"
|
||||
"golang.org/x/tools/go/ssa/ssautil"
|
||||
|
||||
"github.com/golangci/golangci-lint/pkg/config"
|
||||
"github.com/golangci/golangci-lint/pkg/exitcodes"
|
||||
|
@ -126,12 +126,12 @@ func (cl ContextLoader) makeFakeLoaderProgram(pkgs []*packages.Package) *loader.
|
|||
}
|
||||
}
|
||||
|
||||
func (cl ContextLoader) buildSSAProgram(pkgs []*packages.Package, name string) *ssa.Program {
|
||||
func (cl ContextLoader) buildSSAProgram(pkgs []*packages.Package) *ssa.Program {
|
||||
startedAt := time.Now()
|
||||
var pkgsBuiltDuration time.Duration
|
||||
defer func() {
|
||||
cl.log.Infof("SSA %srepr building timing: packages building %s, total %s",
|
||||
name, pkgsBuiltDuration, time.Since(startedAt))
|
||||
cl.log.Infof("SSA repr building timing: packages building %s, total %s",
|
||||
pkgsBuiltDuration, time.Since(startedAt))
|
||||
}()
|
||||
|
||||
ssaProg, _ := ssautil.Packages(pkgs, ssa.GlobalDebug)
|
||||
|
@ -332,10 +332,9 @@ func (cl ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*li
|
|||
prog = cl.makeFakeLoaderProgram(pkgs)
|
||||
}
|
||||
|
||||
var ssaProg, megacheckSSAProg *ssa.Program
|
||||
var ssaProg *ssa.Program
|
||||
if loadMode == packages.LoadAllSyntax {
|
||||
ssaProg = cl.buildSSAProgram(pkgs, "")
|
||||
megacheckSSAProg = cl.buildSSAProgram(pkgs, "for megacheck ")
|
||||
ssaProg = cl.buildSSAProgram(pkgs)
|
||||
}
|
||||
|
||||
astLog := cl.log.Child("astcache")
|
||||
|
@ -345,10 +344,9 @@ func (cl ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*li
|
|||
}
|
||||
|
||||
ret := &linter.Context{
|
||||
Packages: pkgs,
|
||||
Program: prog,
|
||||
SSAProgram: ssaProg,
|
||||
MegacheckSSAProgram: megacheckSSAProg,
|
||||
Packages: pkgs,
|
||||
Program: prog,
|
||||
SSAProgram: ssaProg,
|
||||
LoaderConfig: &loader.Config{
|
||||
Cwd: "", // used by depguard and fallbacked to os.Getcwd
|
||||
Build: nil, // used by depguard and megacheck and falls back to build.Default
|
||||
|
|
|
@ -176,43 +176,71 @@ func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments .
|
|||
var ret []ignoredRange
|
||||
for _, g := range comments {
|
||||
for _, c := range g.List {
|
||||
text := strings.TrimLeft(c.Text, "/ ")
|
||||
if !strings.HasPrefix(text, "nolint") {
|
||||
continue
|
||||
ir := p.extractInlineRangeFromComment(c.Text, g, fset)
|
||||
if ir != nil {
|
||||
ret = append(ret, *ir)
|
||||
}
|
||||
|
||||
var linters []string
|
||||
if strings.HasPrefix(text, "nolint:") {
|
||||
// ignore specific linters
|
||||
text = strings.Split(text, "//")[0] // allow another comment after this comment
|
||||
linterItems := strings.Split(strings.TrimPrefix(text, "nolint:"), ",")
|
||||
for _, linter := range linterItems {
|
||||
linterName := strings.ToLower(strings.TrimSpace(linter))
|
||||
lc := p.dbManager.GetLinterConfig(linterName)
|
||||
if lc == nil {
|
||||
p.unknownLintersSet[linterName] = true
|
||||
continue
|
||||
}
|
||||
linters = append(linters, lc.Name()) // normalize name to work with aliases
|
||||
}
|
||||
} // else ignore all linters
|
||||
nolintDebugf("%d: linters are %s", fset.Position(g.Pos()).Line, linters)
|
||||
|
||||
pos := fset.Position(g.Pos())
|
||||
ret = append(ret, ignoredRange{
|
||||
Range: result.Range{
|
||||
From: pos.Line,
|
||||
To: fset.Position(g.End()).Line,
|
||||
},
|
||||
col: pos.Column,
|
||||
linters: linters,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *token.FileSet) *ignoredRange {
|
||||
text = strings.TrimLeft(text, "/ ")
|
||||
if !strings.HasPrefix(text, "nolint") {
|
||||
return nil
|
||||
}
|
||||
|
||||
buildRange := func(linters []string) *ignoredRange {
|
||||
pos := fset.Position(g.Pos())
|
||||
return &ignoredRange{
|
||||
Range: result.Range{
|
||||
From: pos.Line,
|
||||
To: fset.Position(g.End()).Line,
|
||||
},
|
||||
col: pos.Column,
|
||||
linters: linters,
|
||||
}
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(text, "nolint:") {
|
||||
return buildRange(nil) // ignore all linters
|
||||
}
|
||||
|
||||
// ignore specific linters
|
||||
var linters []string
|
||||
text = strings.Split(text, "//")[0] // allow another comment after this comment
|
||||
linterItems := strings.Split(strings.TrimPrefix(text, "nolint:"), ",")
|
||||
var gotUnknownLinters bool
|
||||
for _, linter := range linterItems {
|
||||
linterName := strings.ToLower(strings.TrimSpace(linter))
|
||||
metaLinter := p.dbManager.GetMetaLinter(linterName)
|
||||
if metaLinter != nil {
|
||||
// user can set metalinter name in nolint directive (e.g. megacheck), then
|
||||
// we should add to nolint all the metalinter's default children
|
||||
linters = append(linters, metaLinter.DefaultChildLinterNames()...)
|
||||
continue
|
||||
}
|
||||
|
||||
lc := p.dbManager.GetLinterConfig(linterName)
|
||||
if lc == nil {
|
||||
p.unknownLintersSet[linterName] = true
|
||||
gotUnknownLinters = true
|
||||
continue
|
||||
}
|
||||
|
||||
linters = append(linters, lc.Name()) // normalize name to work with aliases
|
||||
}
|
||||
|
||||
if gotUnknownLinters {
|
||||
return buildRange(nil) // ignore all linters to not annoy user
|
||||
}
|
||||
|
||||
nolintDebugf("%d: linters are %s", fset.Position(g.Pos()).Line, linters)
|
||||
return buildRange(linters)
|
||||
}
|
||||
|
||||
func (p Nolint) Finish() {
|
||||
if len(p.unknownLintersSet) == 0 {
|
||||
return
|
||||
|
|
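The nolint change above lets a metalinter name in a `//nolint` directive stand for its default children, and falls back to ignoring all linters when an unknown name is given. The following is a minimal, self-contained sketch of that expansion; the helper maps (`metaLinters`, `knownLinters`) are illustrative stand-ins for the real linter database, not the project's actual API.

```go
package main

import (
	"fmt"
	"strings"
)

// metaLinters maps a metalinter name to its default children, mirroring
// MegacheckMetalinter.DefaultChildLinterNames from the diff (assumed data).
var metaLinters = map[string][]string{
	"megacheck": {"staticcheck", "gosimple", "unused"},
}

// knownLinters stands in for the real linter database lookup (assumed data).
var knownLinters = map[string]bool{
	"staticcheck": true, "gosimple": true, "unused": true,
	"stylecheck": true, "govet": true, "errcheck": true, "gosec": true,
}

// expandNolint takes the text of a //nolint comment and returns the linters
// to suppress; nil means "suppress all linters" (a bare //nolint, or a
// directive naming an unknown linter, to avoid annoying the user).
func expandNolint(text string) []string {
	text = strings.TrimLeft(text, "/ ")
	if !strings.HasPrefix(text, "nolint:") {
		return nil // bare //nolint: ignore everything
	}
	text = strings.Split(text, "//")[0] // allow a trailing explanatory comment
	var linters []string
	for _, item := range strings.Split(strings.TrimPrefix(text, "nolint:"), ",") {
		name := strings.ToLower(strings.TrimSpace(item))
		if children, ok := metaLinters[name]; ok {
			linters = append(linters, children...) // e.g. megacheck -> its children
			continue
		}
		if !knownLinters[name] {
			return nil // unknown linter name: suppress all
		}
		linters = append(linters, name)
	}
	return linters
}

func main() {
	fmt.Println(expandNolint("//nolint:megacheck")) // [staticcheck gosimple unused]
	fmt.Println(expandNolint("//nolint:gosec"))     // [gosec]
	fmt.Println(expandNolint("//nolint:wat"))       // [] -> suppress all linters
}
```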
|
@ -74,23 +74,6 @@ func getEnabledByDefaultFastLintersWith(with ...string) []string {
|
|||
return ret
|
||||
}
|
||||
|
||||
func mergeMegacheck(linters []string) []string {
|
||||
if inSlice(linters, "staticcheck") &&
|
||||
inSlice(linters, "gosimple") &&
|
||||
inSlice(linters, "unused") {
|
||||
ret := []string{"megacheck"}
|
||||
for _, linter := range linters {
|
||||
if !inSlice([]string{"staticcheck", "gosimple", "unused"}, linter) {
|
||||
ret = append(ret, linter)
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
return linters
|
||||
}
|
||||
|
||||
func TestEnabledLinters(t *testing.T) {
|
||||
type tc struct {
|
||||
name string
|
||||
|
@ -180,8 +163,8 @@ func TestEnabledLinters(t *testing.T) {
|
|||
},
|
||||
{
|
||||
name: "fast option combined with enable and enable-all",
|
||||
args: "--enable-all --fast --enable=megacheck",
|
||||
el: getAllFastLintersWith("megacheck"),
|
||||
args: "--enable-all --fast --enable=staticcheck",
|
||||
el: getAllFastLintersWith("staticcheck"),
|
||||
noImplicitFast: true,
|
||||
},
|
||||
}
|
||||
|
@ -197,10 +180,9 @@ func TestEnabledLinters(t *testing.T) {
|
|||
runArgs = append(runArgs, strings.Split(c.args, " ")...)
|
||||
}
|
||||
r := testshared.NewLintRunner(t).RunWithYamlConfig(c.cfg, runArgs...)
|
||||
el := mergeMegacheck(c.el)
|
||||
sort.StringSlice(el).Sort()
|
||||
sort.StringSlice(c.el).Sort()
|
||||
|
||||
expectedLine := fmt.Sprintf("Active %d linters: [%s]", len(el), strings.Join(el, " "))
|
||||
expectedLine := fmt.Sprintf("Active %d linters: [%s]", len(c.el), strings.Join(c.el, " "))
|
||||
r.ExpectOutputContains(expectedLine)
|
||||
})
|
||||
}
|
10
test/testdata/gosec.go
vendored
10
test/testdata/gosec.go
vendored
|
@ -10,3 +10,13 @@ func Gosec() {
|
|||
h := md5.New() // ERROR "G401: Use of weak cryptographic primitive"
|
||||
log.Print(h)
|
||||
}
|
||||
|
||||
func GosecNolintGas() {
|
||||
h := md5.New() //nolint:gas
|
||||
log.Print(h)
|
||||
}
|
||||
|
||||
func GosecNolintGosec() {
|
||||
h := md5.New() //nolint:gosec
|
||||
log.Print(h)
|
||||
}
|
||||
|
|
25
test/testdata/gosimple.go
vendored
Normal file
25
test/testdata/gosimple.go
vendored
Normal file
|
@ -0,0 +1,25 @@
|
|||
//args: -Egosimple
|
||||
package testdata
|
||||
|
||||
import "strings"
|
||||
|
||||
func Gosimple(id1, s1 string) string {
|
||||
if strings.HasPrefix(id1, s1) { // ERROR "should replace.*with.*strings.TrimPrefix"
|
||||
id1 = strings.TrimPrefix(id1, s1)
|
||||
}
|
||||
return id1
|
||||
}
|
||||
|
||||
func GosimpleNolintGosimple(id1, s1 string) string {
|
||||
if strings.HasPrefix(id1, s1) { //nolint:gosimple
|
||||
id1 = strings.TrimPrefix(id1, s1)
|
||||
}
|
||||
return id1
|
||||
}
|
||||
|
||||
func GosimpleNolintMegacheck(id1, s1 string) string {
|
||||
if strings.HasPrefix(id1, s1) { //nolint:megacheck
|
||||
id1 = strings.TrimPrefix(id1, s1)
|
||||
}
|
||||
return id1
|
||||
}
|
8
test/testdata/govet.go
vendored
8
test/testdata/govet.go
vendored
|
@ -22,3 +22,11 @@ func GovetShadow(f io.Reader, buf []byte) (err error) {
|
|||
_ = err
|
||||
return
|
||||
}
|
||||
|
||||
func GovetNolintVet() error {
|
||||
return &os.PathError{"first", "path", os.ErrNotExist} //nolint:vet
|
||||
}
|
||||
|
||||
func GovetNolintVetShadow() error {
|
||||
return &os.PathError{"first", "path", os.ErrNotExist} //nolint:vetshadow
|
||||
}
|
||||
|
|
17
test/testdata/staticcheck.go
vendored
Normal file
17
test/testdata/staticcheck.go
vendored
Normal file
|
@ -0,0 +1,17 @@
|
|||
//args: -Estaticcheck
|
||||
package testdata
|
||||
|
||||
func Staticcheck() {
|
||||
var x int
|
||||
x = x // ERROR "self-assignment of x to x"
|
||||
}
|
||||
|
||||
func StaticcheckNolintStaticcheck() {
|
||||
var x int
|
||||
x = x //nolint:staticcheck
|
||||
}
|
||||
|
||||
func StaticcheckNolintMegacheck() {
|
||||
var x int
|
||||
x = x //nolint:megacheck
|
||||
}
|
17  test/testdata/staticcheck_in_megacheck.go  vendored  Normal file

@@ -0,0 +1,17 @@
//args: -Emegacheck
package testdata

func StaticcheckInMegacheck() {
	var x int
	x = x // ERROR "self-assignment of x to x"
}

func StaticcheckNolintStaticcheckInMegacheck() {
	var x int
	x = x //nolint:staticcheck
}

func StaticcheckNolintMegacheckInMegacheck() {
	var x int
	x = x //nolint:megacheck
}
20  test/testdata/stylecheck.go  vendored  Normal file

@@ -0,0 +1,20 @@
//args: -Estylecheck
package testdata

func Stylecheck(x int) {
	if 0 == x { // ERROR "don't use Yoda conditions"
		panic(x)
	}
}

func StylecheckNolintStylecheck(x int) {
	if 0 == x { //nolint:stylecheck
		panic(x)
	}
}

func StylecheckNolintMegacheck(x int) {
	if 0 == x { //nolint:megacheck // ERROR "don't use Yoda conditions"
		panic(x)
	}
}
@@ -1,7 +1,13 @@
 //args: -Emegacheck
 package testdata
 
-func Megacheck() {
+func StylecheckNotInMegacheck(x int) {
+	if 0 == x {
+		panic(x)
+	}
+}
+
+func Staticcheck2() {
 	var x int
 	x = x // ERROR "self-assignment of x to x"
 }
8  test/testdata/unused.go  vendored  Normal file

@@ -0,0 +1,8 @@
//args: -Eunused
package testdata

type unusedStruct struct{} // ERROR "type `unusedStruct` is unused"

type unusedStructNolintUnused struct{} //nolint:unused

type unusedStructNolintMegacheck struct{} //nolint:megacheck
5  vendor/github.com/BurntSushi/toml/.gitignore  generated  vendored  Normal file

@@ -0,0 +1,5 @@
TAGS
tags
.*.swp
tomlcheck/tomlcheck
toml.test
15  vendor/github.com/BurntSushi/toml/.travis.yml  generated  vendored  Normal file

@@ -0,0 +1,15 @@
language: go
go:
  - 1.1
  - 1.2
  - 1.3
  - 1.4
  - 1.5
  - 1.6
  - tip
install:
  - go install ./...
  - go get github.com/BurntSushi/toml-test
script:
  - export PATH="$PATH:$HOME/gopath/bin"
  - make test
3  vendor/github.com/BurntSushi/toml/COMPATIBLE  generated  vendored  Normal file

@@ -0,0 +1,3 @@
Compatible with TOML version
[v0.4.0](https://github.com/toml-lang/toml/blob/v0.4.0/versions/en/toml-v0.4.0.md)

21  vendor/github.com/BurntSushi/toml/COPYING  generated  vendored  Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2013 TOML authors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
19  vendor/github.com/BurntSushi/toml/Makefile  generated  vendored  Normal file

@@ -0,0 +1,19 @@
install:
	go install ./...

test: install
	go test -v
	toml-test toml-test-decoder
	toml-test -encoder toml-test-encoder

fmt:
	gofmt -w *.go */*.go
	colcheck *.go */*.go

tags:
	find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS

push:
	git push origin master
	git push github master

218  vendor/github.com/BurntSushi/toml/README.md  generated  vendored  Normal file

@@ -0,0 +1,218 @@
## TOML parser and encoder for Go with reflection
|
||||
|
||||
TOML stands for Tom's Obvious, Minimal Language. This Go package provides a
|
||||
reflection interface similar to Go's standard library `json` and `xml`
|
||||
packages. This package also supports the `encoding.TextUnmarshaler` and
|
||||
`encoding.TextMarshaler` interfaces so that you can define custom data
|
||||
representations. (There is an example of this below.)
|
||||
|
||||
Spec: https://github.com/toml-lang/toml
|
||||
|
||||
Compatible with TOML version
|
||||
[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)
|
||||
|
||||
Documentation: https://godoc.org/github.com/BurntSushi/toml
|
||||
|
||||
Installation:
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/toml
|
||||
```
|
||||
|
||||
Try the toml validator:
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/toml/cmd/tomlv
|
||||
tomlv some-toml-file.toml
|
||||
```
|
||||
|
||||
[](https://travis-ci.org/BurntSushi/toml) [](https://godoc.org/github.com/BurntSushi/toml)
|
||||
|
||||
### Testing
|
||||
|
||||
This package passes all tests in
|
||||
[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder
|
||||
and the encoder.
|
||||
|
||||
### Examples
|
||||
|
||||
This package works similarly to how the Go standard library handles `XML`
|
||||
and `JSON`. Namely, data is loaded into Go values via reflection.
|
||||
|
||||
For the simplest example, consider some TOML file as just a list of keys
|
||||
and values:
|
||||
|
||||
```toml
|
||||
Age = 25
|
||||
Cats = [ "Cauchy", "Plato" ]
|
||||
Pi = 3.14
|
||||
Perfection = [ 6, 28, 496, 8128 ]
|
||||
DOB = 1987-07-05T05:45:00Z
|
||||
```
|
||||
|
||||
Which could be defined in Go as:
|
||||
|
||||
```go
|
||||
type Config struct {
|
||||
Age int
|
||||
Cats []string
|
||||
Pi float64
|
||||
Perfection []int
|
||||
DOB time.Time // requires `import time`
|
||||
}
|
||||
```
|
||||
|
||||
And then decoded with:
|
||||
|
||||
```go
|
||||
var conf Config
|
||||
if _, err := toml.Decode(tomlData, &conf); err != nil {
|
||||
// handle error
|
||||
}
|
||||
```
|
||||
|
||||
You can also use struct tags if your struct field name doesn't map to a TOML
|
||||
key value directly:
|
||||
|
||||
```toml
|
||||
some_key_NAME = "wat"
|
||||
```
|
||||
|
||||
```go
|
||||
type TOML struct {
|
||||
ObscureKey string `toml:"some_key_NAME"`
|
||||
}
|
||||
```
|
||||
|
||||
### Using the `encoding.TextUnmarshaler` interface
|
||||
|
||||
Here's an example that automatically parses duration strings into
|
||||
`time.Duration` values:
|
||||
|
||||
```toml
|
||||
[[song]]
|
||||
name = "Thunder Road"
|
||||
duration = "4m49s"
|
||||
|
||||
[[song]]
|
||||
name = "Stairway to Heaven"
|
||||
duration = "8m03s"
|
||||
```
|
||||
|
||||
Which can be decoded with:
|
||||
|
||||
```go
|
||||
type song struct {
|
||||
Name string
|
||||
Duration duration
|
||||
}
|
||||
type songs struct {
|
||||
Song []song
|
||||
}
|
||||
var favorites songs
|
||||
if _, err := toml.Decode(blob, &favorites); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
for _, s := range favorites.Song {
|
||||
fmt.Printf("%s (%s)\n", s.Name, s.Duration)
|
||||
}
|
||||
```
|
||||
|
||||
And you'll also need a `duration` type that satisfies the
|
||||
`encoding.TextUnmarshaler` interface:
|
||||
|
||||
```go
|
||||
type duration struct {
|
||||
time.Duration
|
||||
}
|
||||
|
||||
func (d *duration) UnmarshalText(text []byte) error {
|
||||
var err error
|
||||
d.Duration, err = time.ParseDuration(string(text))
|
||||
return err
|
||||
}
|
||||
```
|
||||
|
||||
### More complex usage
|
||||
|
||||
Here's an example of how to load the example from the official spec page:
|
||||
|
||||
```toml
|
||||
# This is a TOML document. Boom.
|
||||
|
||||
title = "TOML Example"
|
||||
|
||||
[owner]
|
||||
name = "Tom Preston-Werner"
|
||||
organization = "GitHub"
|
||||
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
|
||||
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
|
||||
|
||||
[database]
|
||||
server = "192.168.1.1"
|
||||
ports = [ 8001, 8001, 8002 ]
|
||||
connection_max = 5000
|
||||
enabled = true
|
||||
|
||||
[servers]
|
||||
|
||||
# You can indent as you please. Tabs or spaces. TOML don't care.
|
||||
[servers.alpha]
|
||||
ip = "10.0.0.1"
|
||||
dc = "eqdc10"
|
||||
|
||||
[servers.beta]
|
||||
ip = "10.0.0.2"
|
||||
dc = "eqdc10"
|
||||
|
||||
[clients]
|
||||
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
||||
|
||||
# Line breaks are OK when inside arrays
|
||||
hosts = [
|
||||
"alpha",
|
||||
"omega"
|
||||
]
|
||||
```
|
||||
|
||||
And the corresponding Go types are:
|
||||
|
||||
```go
|
||||
type tomlConfig struct {
|
||||
Title string
|
||||
Owner ownerInfo
|
||||
DB database `toml:"database"`
|
||||
Servers map[string]server
|
||||
Clients clients
|
||||
}
|
||||
|
||||
type ownerInfo struct {
|
||||
Name string
|
||||
Org string `toml:"organization"`
|
||||
Bio string
|
||||
DOB time.Time
|
||||
}
|
||||
|
||||
type database struct {
|
||||
Server string
|
||||
Ports []int
|
||||
ConnMax int `toml:"connection_max"`
|
||||
Enabled bool
|
||||
}
|
||||
|
||||
type server struct {
|
||||
IP string
|
||||
DC string
|
||||
}
|
||||
|
||||
type clients struct {
|
||||
Data [][]interface{}
|
||||
Hosts []string
|
||||
}
|
||||
```
|
||||
|
||||
Note that a case insensitive match will be tried if an exact match can't be
|
||||
found.
|
||||
|
||||
A working example of the above can be found in `_examples/example.{go,toml}`.
|
509  vendor/github.com/BurntSushi/toml/decode.go  generated  vendored  Normal file

@@ -0,0 +1,509 @@
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
func e(format string, args ...interface{}) error {
|
||||
return fmt.Errorf("toml: "+format, args...)
|
||||
}
|
||||
|
||||
// Unmarshaler is the interface implemented by objects that can unmarshal a
|
||||
// TOML description of themselves.
|
||||
type Unmarshaler interface {
|
||||
UnmarshalTOML(interface{}) error
|
||||
}
|
||||
|
||||
// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`.
|
||||
func Unmarshal(p []byte, v interface{}) error {
|
||||
_, err := Decode(string(p), v)
|
||||
return err
|
||||
}
|
||||
|
||||
// Primitive is a TOML value that hasn't been decoded into a Go value.
|
||||
// When using the various `Decode*` functions, the type `Primitive` may
|
||||
// be given to any value, and its decoding will be delayed.
|
||||
//
|
||||
// A `Primitive` value can be decoded using the `PrimitiveDecode` function.
|
||||
//
|
||||
// The underlying representation of a `Primitive` value is subject to change.
|
||||
// Do not rely on it.
|
||||
//
|
||||
// N.B. Primitive values are still parsed, so using them will only avoid
|
||||
// the overhead of reflection. They can be useful when you don't know the
|
||||
// exact type of TOML data until run time.
|
||||
type Primitive struct {
|
||||
undecoded interface{}
|
||||
context Key
|
||||
}
|
||||
|
||||
// DEPRECATED!
|
||||
//
|
||||
// Use MetaData.PrimitiveDecode instead.
|
||||
func PrimitiveDecode(primValue Primitive, v interface{}) error {
|
||||
md := MetaData{decoded: make(map[string]bool)}
|
||||
return md.unify(primValue.undecoded, rvalue(v))
|
||||
}
|
||||
|
||||
// PrimitiveDecode is just like the other `Decode*` functions, except it
|
||||
// decodes a TOML value that has already been parsed. Valid primitive values
|
||||
// can *only* be obtained from values filled by the decoder functions,
|
||||
// including this method. (i.e., `v` may contain more `Primitive`
|
||||
// values.)
|
||||
//
|
||||
// Meta data for primitive values is included in the meta data returned by
|
||||
// the `Decode*` functions with one exception: keys returned by the Undecoded
|
||||
// method will only reflect keys that were decoded. Namely, any keys hidden
|
||||
// behind a Primitive will be considered undecoded. Executing this method will
|
||||
// update the undecoded keys in the meta data. (See the example.)
|
||||
func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error {
|
||||
md.context = primValue.context
|
||||
defer func() { md.context = nil }()
|
||||
return md.unify(primValue.undecoded, rvalue(v))
|
||||
}
|
||||
|
||||
// Decode will decode the contents of `data` in TOML format into a pointer
|
||||
// `v`.
|
||||
//
|
||||
// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be
|
||||
// used interchangeably.)
|
||||
//
|
||||
// TOML arrays of tables correspond to either a slice of structs or a slice
|
||||
// of maps.
|
||||
//
|
||||
// TOML datetimes correspond to Go `time.Time` values.
|
||||
//
|
||||
// All other TOML types (float, string, int, bool and array) correspond
|
||||
// to the obvious Go types.
|
||||
//
|
||||
// An exception to the above rules is if a type implements the
|
||||
// encoding.TextUnmarshaler interface. In this case, any primitive TOML value
|
||||
// (floats, strings, integers, booleans and datetimes) will be converted to
|
||||
// a byte string and given to the value's UnmarshalText method. See the
|
||||
// Unmarshaler example for a demonstration with time duration strings.
|
||||
//
|
||||
// Key mapping
|
||||
//
|
||||
// TOML keys can map to either keys in a Go map or field names in a Go
|
||||
// struct. The special `toml` struct tag may be used to map TOML keys to
|
||||
// struct fields that don't match the key name exactly. (See the example.)
|
||||
// A case insensitive match to struct names will be tried if an exact match
|
||||
// can't be found.
|
||||
//
|
||||
// The mapping between TOML values and Go values is loose. That is, there
|
||||
// may exist TOML values that cannot be placed into your representation, and
|
||||
// there may be parts of your representation that do not correspond to
|
||||
// TOML values. This loose mapping can be made stricter by using the IsDefined
|
||||
// and/or Undecoded methods on the MetaData returned.
|
||||
//
|
||||
// This decoder will not handle cyclic types. If a cyclic type is passed,
|
||||
// `Decode` will not terminate.
|
||||
func Decode(data string, v interface{}) (MetaData, error) {
|
||||
rv := reflect.ValueOf(v)
|
||||
if rv.Kind() != reflect.Ptr {
|
||||
return MetaData{}, e("Decode of non-pointer %s", reflect.TypeOf(v))
|
||||
}
|
||||
if rv.IsNil() {
|
||||
return MetaData{}, e("Decode of nil %s", reflect.TypeOf(v))
|
||||
}
|
||||
p, err := parse(data)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
md := MetaData{
|
||||
p.mapping, p.types, p.ordered,
|
||||
make(map[string]bool, len(p.ordered)), nil,
|
||||
}
|
||||
return md, md.unify(p.mapping, indirect(rv))
|
||||
}
|
||||
|
||||
// DecodeFile is just like Decode, except it will automatically read the
|
||||
// contents of the file at `fpath` and decode it for you.
|
||||
func DecodeFile(fpath string, v interface{}) (MetaData, error) {
|
||||
bs, err := ioutil.ReadFile(fpath)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
return Decode(string(bs), v)
|
||||
}
|
||||
|
||||
// DecodeReader is just like Decode, except it will consume all bytes
|
||||
// from the reader and decode it for you.
|
||||
func DecodeReader(r io.Reader, v interface{}) (MetaData, error) {
|
||||
bs, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
return Decode(string(bs), v)
|
||||
}
|
||||
|
||||
// unify performs a sort of type unification based on the structure of `rv`,
|
||||
// which is the client representation.
|
||||
//
|
||||
// Any type mismatch produces an error. Finding a type that we don't know
|
||||
// how to handle produces an unsupported type error.
|
||||
func (md *MetaData) unify(data interface{}, rv reflect.Value) error {
|
||||
|
||||
// Special case. Look for a `Primitive` value.
|
||||
if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() {
|
||||
// Save the undecoded data and the key context into the primitive
|
||||
// value.
|
||||
context := make(Key, len(md.context))
|
||||
copy(context, md.context)
|
||||
rv.Set(reflect.ValueOf(Primitive{
|
||||
undecoded: data,
|
||||
context: context,
|
||||
}))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Special case. Unmarshaler Interface support.
|
||||
if rv.CanAddr() {
|
||||
if v, ok := rv.Addr().Interface().(Unmarshaler); ok {
|
||||
return v.UnmarshalTOML(data)
|
||||
}
|
||||
}
|
||||
|
||||
// Special case. Handle time.Time values specifically.
|
||||
// TODO: Remove this code when we decide to drop support for Go 1.1.
|
||||
// This isn't necessary in Go 1.2 because time.Time satisfies the encoding
|
||||
// interfaces.
|
||||
if rv.Type().AssignableTo(rvalue(time.Time{}).Type()) {
|
||||
return md.unifyDatetime(data, rv)
|
||||
}
|
||||
|
||||
// Special case. Look for a value satisfying the TextUnmarshaler interface.
|
||||
if v, ok := rv.Interface().(TextUnmarshaler); ok {
|
||||
return md.unifyText(data, v)
|
||||
}
|
||||
// BUG(burntsushi)
|
||||
// The behavior here is incorrect whenever a Go type satisfies the
|
||||
// encoding.TextUnmarshaler interface but also corresponds to a TOML
|
||||
// hash or array. In particular, the unmarshaler should only be applied
|
||||
// to primitive TOML values. But at this point, it will be applied to
|
||||
// all kinds of values and produce an incorrect error whenever those values
|
||||
// are hashes or arrays (including arrays of tables).
|
||||
|
||||
k := rv.Kind()
|
||||
|
||||
// laziness
|
||||
if k >= reflect.Int && k <= reflect.Uint64 {
|
||||
return md.unifyInt(data, rv)
|
||||
}
|
||||
switch k {
|
||||
case reflect.Ptr:
|
||||
elem := reflect.New(rv.Type().Elem())
|
||||
err := md.unify(data, reflect.Indirect(elem))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rv.Set(elem)
|
||||
return nil
|
||||
case reflect.Struct:
|
||||
return md.unifyStruct(data, rv)
|
||||
case reflect.Map:
|
||||
return md.unifyMap(data, rv)
|
||||
case reflect.Array:
|
||||
return md.unifyArray(data, rv)
|
||||
case reflect.Slice:
|
||||
return md.unifySlice(data, rv)
|
||||
case reflect.String:
|
||||
return md.unifyString(data, rv)
|
||||
case reflect.Bool:
|
||||
return md.unifyBool(data, rv)
|
||||
case reflect.Interface:
|
||||
// we only support empty interfaces.
|
||||
if rv.NumMethod() > 0 {
|
||||
return e("unsupported type %s", rv.Type())
|
||||
}
|
||||
return md.unifyAnything(data, rv)
|
||||
case reflect.Float32:
|
||||
fallthrough
|
||||
case reflect.Float64:
|
||||
return md.unifyFloat64(data, rv)
|
||||
}
|
||||
return e("unsupported type %s", rv.Kind())
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error {
|
||||
tmap, ok := mapping.(map[string]interface{})
|
||||
if !ok {
|
||||
if mapping == nil {
|
||||
return nil
|
||||
}
|
||||
return e("type mismatch for %s: expected table but found %T",
|
||||
rv.Type().String(), mapping)
|
||||
}
|
||||
|
||||
for key, datum := range tmap {
|
||||
var f *field
|
||||
fields := cachedTypeFields(rv.Type())
|
||||
for i := range fields {
|
||||
ff := &fields[i]
|
||||
if ff.name == key {
|
||||
f = ff
|
||||
break
|
||||
}
|
||||
if f == nil && strings.EqualFold(ff.name, key) {
|
||||
f = ff
|
||||
}
|
||||
}
|
||||
if f != nil {
|
||||
subv := rv
|
||||
for _, i := range f.index {
|
||||
subv = indirect(subv.Field(i))
|
||||
}
|
||||
if isUnifiable(subv) {
|
||||
md.decoded[md.context.add(key).String()] = true
|
||||
md.context = append(md.context, key)
|
||||
if err := md.unify(datum, subv); err != nil {
|
||||
return err
|
||||
}
|
||||
md.context = md.context[0 : len(md.context)-1]
|
||||
} else if f.name != "" {
|
||||
// Bad user! No soup for you!
|
||||
return e("cannot write unexported field %s.%s",
|
||||
rv.Type().String(), f.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error {
|
||||
tmap, ok := mapping.(map[string]interface{})
|
||||
if !ok {
|
||||
if tmap == nil {
|
||||
return nil
|
||||
}
|
||||
return badtype("map", mapping)
|
||||
}
|
||||
if rv.IsNil() {
|
||||
rv.Set(reflect.MakeMap(rv.Type()))
|
||||
}
|
||||
for k, v := range tmap {
|
||||
md.decoded[md.context.add(k).String()] = true
|
||||
md.context = append(md.context, k)
|
||||
|
||||
rvkey := indirect(reflect.New(rv.Type().Key()))
|
||||
rvval := reflect.Indirect(reflect.New(rv.Type().Elem()))
|
||||
if err := md.unify(v, rvval); err != nil {
|
||||
return err
|
||||
}
|
||||
md.context = md.context[0 : len(md.context)-1]
|
||||
|
||||
rvkey.SetString(k)
|
||||
rv.SetMapIndex(rvkey, rvval)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error {
|
||||
datav := reflect.ValueOf(data)
|
||||
if datav.Kind() != reflect.Slice {
|
||||
if !datav.IsValid() {
|
||||
return nil
|
||||
}
|
||||
return badtype("slice", data)
|
||||
}
|
||||
sliceLen := datav.Len()
|
||||
if sliceLen != rv.Len() {
|
||||
return e("expected array length %d; got TOML array of length %d",
|
||||
rv.Len(), sliceLen)
|
||||
}
|
||||
return md.unifySliceArray(datav, rv)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error {
|
||||
datav := reflect.ValueOf(data)
|
||||
if datav.Kind() != reflect.Slice {
|
||||
if !datav.IsValid() {
|
||||
return nil
|
||||
}
|
||||
return badtype("slice", data)
|
||||
}
|
||||
n := datav.Len()
|
||||
if rv.IsNil() || rv.Cap() < n {
|
||||
rv.Set(reflect.MakeSlice(rv.Type(), n, n))
|
||||
}
|
||||
rv.SetLen(n)
|
||||
return md.unifySliceArray(datav, rv)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifySliceArray(data, rv reflect.Value) error {
|
||||
sliceLen := data.Len()
|
||||
for i := 0; i < sliceLen; i++ {
|
||||
v := data.Index(i).Interface()
|
||||
sliceval := indirect(rv.Index(i))
|
||||
if err := md.unify(v, sliceval); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error {
|
||||
if _, ok := data.(time.Time); ok {
|
||||
rv.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
}
|
||||
return badtype("time.Time", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error {
|
||||
if s, ok := data.(string); ok {
|
||||
rv.SetString(s)
|
||||
return nil
|
||||
}
|
||||
return badtype("string", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error {
|
||||
if num, ok := data.(float64); ok {
|
||||
switch rv.Kind() {
|
||||
case reflect.Float32:
|
||||
fallthrough
|
||||
case reflect.Float64:
|
||||
rv.SetFloat(num)
|
||||
default:
|
||||
panic("bug")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return badtype("float", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error {
|
||||
if num, ok := data.(int64); ok {
|
||||
if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int64:
|
||||
// No bounds checking necessary.
|
||||
case reflect.Int8:
|
||||
if num < math.MinInt8 || num > math.MaxInt8 {
|
||||
return e("value %d is out of range for int8", num)
|
||||
}
|
||||
case reflect.Int16:
|
||||
if num < math.MinInt16 || num > math.MaxInt16 {
|
||||
return e("value %d is out of range for int16", num)
|
||||
}
|
||||
case reflect.Int32:
|
||||
if num < math.MinInt32 || num > math.MaxInt32 {
|
||||
return e("value %d is out of range for int32", num)
|
||||
}
|
||||
}
|
||||
rv.SetInt(num)
|
||||
} else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 {
|
||||
unum := uint64(num)
|
||||
switch rv.Kind() {
|
||||
case reflect.Uint, reflect.Uint64:
|
||||
// No bounds checking necessary.
|
||||
case reflect.Uint8:
|
||||
if num < 0 || unum > math.MaxUint8 {
|
||||
return e("value %d is out of range for uint8", num)
|
||||
}
|
||||
case reflect.Uint16:
|
||||
if num < 0 || unum > math.MaxUint16 {
|
||||
return e("value %d is out of range for uint16", num)
|
||||
}
|
||||
case reflect.Uint32:
|
||||
if num < 0 || unum > math.MaxUint32 {
|
||||
return e("value %d is out of range for uint32", num)
|
||||
}
|
||||
}
|
||||
rv.SetUint(unum)
|
||||
} else {
|
||||
panic("unreachable")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return badtype("integer", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error {
|
||||
if b, ok := data.(bool); ok {
|
||||
rv.SetBool(b)
|
||||
return nil
|
||||
}
|
||||
return badtype("boolean", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error {
|
||||
rv.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyText(data interface{}, v TextUnmarshaler) error {
|
||||
var s string
|
||||
switch sdata := data.(type) {
|
||||
case TextMarshaler:
|
||||
text, err := sdata.MarshalText()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s = string(text)
|
||||
case fmt.Stringer:
|
||||
s = sdata.String()
|
||||
case string:
|
||||
s = sdata
|
||||
case bool:
|
||||
s = fmt.Sprintf("%v", sdata)
|
||||
case int64:
|
||||
s = fmt.Sprintf("%d", sdata)
|
||||
case float64:
|
||||
s = fmt.Sprintf("%f", sdata)
|
||||
default:
|
||||
return badtype("primitive (string-like)", data)
|
||||
}
|
||||
if err := v.UnmarshalText([]byte(s)); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// rvalue returns a reflect.Value of `v`. All pointers are resolved.
|
||||
func rvalue(v interface{}) reflect.Value {
|
||||
return indirect(reflect.ValueOf(v))
|
||||
}
|
||||
|
||||
// indirect returns the value pointed to by a pointer.
|
||||
// Pointers are followed until the value is not a pointer.
|
||||
// New values are allocated for each nil pointer.
|
||||
//
|
||||
// An exception to this rule is if the value satisfies an interface of
|
||||
// interest to us (like encoding.TextUnmarshaler).
|
||||
func indirect(v reflect.Value) reflect.Value {
|
||||
if v.Kind() != reflect.Ptr {
|
||||
if v.CanSet() {
|
||||
pv := v.Addr()
|
||||
if _, ok := pv.Interface().(TextUnmarshaler); ok {
|
||||
return pv
|
||||
}
|
||||
}
|
||||
return v
|
||||
}
|
||||
if v.IsNil() {
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
}
|
||||
return indirect(reflect.Indirect(v))
|
||||
}
|
||||
|
||||
func isUnifiable(rv reflect.Value) bool {
|
||||
if rv.CanSet() {
|
||||
return true
|
||||
}
|
||||
if _, ok := rv.Interface().(TextUnmarshaler); ok {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func badtype(expected string, data interface{}) error {
|
||||
return e("cannot load TOML value of type %T into a Go %s", data, expected)
|
||||
}
|
121  vendor/github.com/BurntSushi/toml/decode_meta.go  generated  vendored  Normal file

@@ -0,0 +1,121 @@
package toml
|
||||
|
||||
import "strings"
|
||||
|
||||
// MetaData allows access to meta information about TOML data that may not
|
||||
// be inferrable via reflection. In particular, whether a key has been defined
|
||||
// and the TOML type of a key.
|
||||
type MetaData struct {
|
||||
mapping map[string]interface{}
|
||||
types map[string]tomlType
|
||||
keys []Key
|
||||
decoded map[string]bool
|
||||
context Key // Used only during decoding.
|
||||
}
|
||||
|
||||
// IsDefined returns true if the key given exists in the TOML data. The key
|
||||
// should be specified hierarchially. e.g.,
|
||||
//
|
||||
// // access the TOML key 'a.b.c'
|
||||
// IsDefined("a", "b", "c")
|
||||
//
|
||||
// IsDefined will return false if an empty key given. Keys are case sensitive.
|
||||
func (md *MetaData) IsDefined(key ...string) bool {
|
||||
if len(key) == 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
var hash map[string]interface{}
|
||||
var ok bool
|
||||
var hashOrVal interface{} = md.mapping
|
||||
for _, k := range key {
|
||||
if hash, ok = hashOrVal.(map[string]interface{}); !ok {
|
||||
return false
|
||||
}
|
||||
if hashOrVal, ok = hash[k]; !ok {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Type returns a string representation of the type of the key specified.
|
||||
//
|
||||
// Type will return the empty string if given an empty key or a key that
|
||||
// does not exist. Keys are case sensitive.
|
||||
func (md *MetaData) Type(key ...string) string {
|
||||
fullkey := strings.Join(key, ".")
|
||||
if typ, ok := md.types[fullkey]; ok {
|
||||
return typ.typeString()
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// Key is the type of any TOML key, including key groups. Use (MetaData).Keys
|
||||
// to get values of this type.
|
||||
type Key []string
|
||||
|
||||
func (k Key) String() string {
|
||||
return strings.Join(k, ".")
|
||||
}
|
||||
|
||||
func (k Key) maybeQuotedAll() string {
|
||||
var ss []string
|
||||
for i := range k {
|
||||
ss = append(ss, k.maybeQuoted(i))
|
||||
}
|
||||
return strings.Join(ss, ".")
|
||||
}
|
||||
|
||||
func (k Key) maybeQuoted(i int) string {
|
||||
quote := false
|
||||
for _, c := range k[i] {
|
||||
if !isBareKeyChar(c) {
|
||||
quote = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if quote {
|
||||
return "\"" + strings.Replace(k[i], "\"", "\\\"", -1) + "\""
|
||||
}
|
||||
return k[i]
|
||||
}
|
||||
|
||||
func (k Key) add(piece string) Key {
|
||||
newKey := make(Key, len(k)+1)
|
||||
copy(newKey, k)
|
||||
newKey[len(k)] = piece
|
||||
return newKey
|
||||
}
|
||||
|
||||
// Keys returns a slice of every key in the TOML data, including key groups.
|
||||
// Each key is itself a slice, where the first element is the top of the
|
||||
// hierarchy and the last is the most specific.
|
||||
//
|
||||
// The list will have the same order as the keys appeared in the TOML data.
|
||||
//
|
||||
// All keys returned are non-empty.
|
||||
func (md *MetaData) Keys() []Key {
|
||||
return md.keys
|
||||
}
|
||||
|
||||
// Undecoded returns all keys that have not been decoded in the order in which
|
||||
// they appear in the original TOML document.
|
||||
//
|
||||
// This includes keys that haven't been decoded because of a Primitive value.
|
||||
// Once the Primitive value is decoded, the keys will be considered decoded.
|
||||
//
|
||||
// Also note that decoding into an empty interface will result in no decoding,
|
||||
// and so no keys will be considered decoded.
|
||||
//
|
||||
// In this sense, the Undecoded keys correspond to keys in the TOML document
|
||||
// that do not have a concrete type in your representation.
|
||||
func (md *MetaData) Undecoded() []Key {
|
||||
undecoded := make([]Key, 0, len(md.keys))
|
||||
for _, key := range md.keys {
|
||||
if !md.decoded[key.String()] {
|
||||
undecoded = append(undecoded, key)
|
||||
}
|
||||
}
|
||||
return undecoded
|
||||
}
|
27  vendor/github.com/BurntSushi/toml/doc.go  generated  vendored  Normal file

@@ -0,0 +1,27 @@
/*
Package toml provides facilities for decoding and encoding TOML configuration
files via reflection. There is also support for delaying decoding with
the Primitive type, and querying the set of keys in a TOML document with the
MetaData type.

The specification implemented: https://github.com/toml-lang/toml

The sub-command github.com/BurntSushi/toml/cmd/tomlv can be used to verify
whether a file is a valid TOML document. It can also be used to print the
type of each key in a TOML document.

Testing

There are two important types of tests used for this package. The first is
contained inside '*_test.go' files and uses the standard Go unit testing
framework. These tests are primarily devoted to holistically testing the
decoder and encoder.

The second type of testing is used to verify the implementation's adherence
to the TOML specification. These tests have been factored into their own
project: https://github.com/BurntSushi/toml-test

The reason the tests are in a separate project is so that they can be used by
any implementation of TOML. Namely, it is language agnostic.
*/
package toml
568  vendor/github.com/BurntSushi/toml/encode.go  generated  vendored  Normal file

@@ -0,0 +1,568 @@
package toml
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type tomlEncodeError struct{ error }
|
||||
|
||||
var (
|
||||
errArrayMixedElementTypes = errors.New(
|
||||
"toml: cannot encode array with mixed element types")
|
||||
errArrayNilElement = errors.New(
|
||||
"toml: cannot encode array with nil element")
|
||||
errNonString = errors.New(
|
||||
"toml: cannot encode a map with non-string key type")
|
||||
errAnonNonStruct = errors.New(
|
||||
"toml: cannot encode an anonymous field that is not a struct")
|
||||
errArrayNoTable = errors.New(
|
||||
"toml: TOML array element cannot contain a table")
|
||||
errNoKey = errors.New(
|
||||
"toml: top-level values must be Go maps or structs")
|
||||
errAnything = errors.New("") // used in testing
|
||||
)
|
||||
|
||||
var quotedReplacer = strings.NewReplacer(
|
||||
"\t", "\\t",
|
||||
"\n", "\\n",
|
||||
"\r", "\\r",
|
||||
"\"", "\\\"",
|
||||
"\\", "\\\\",
|
||||
)
|
||||
|
||||
// Encoder controls the encoding of Go values to a TOML document to some
|
||||
// io.Writer.
|
||||
//
|
||||
// The indentation level can be controlled with the Indent field.
|
||||
type Encoder struct {
|
||||
// A single indentation level. By default it is two spaces.
|
||||
Indent string
|
||||
|
||||
// hasWritten is whether we have written any output to w yet.
|
||||
hasWritten bool
|
||||
w *bufio.Writer
|
||||
}
|
||||
|
||||
// NewEncoder returns a TOML encoder that encodes Go values to the io.Writer
|
||||
// given. By default, a single indentation level is 2 spaces.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{
|
||||
w: bufio.NewWriter(w),
|
||||
Indent: " ",
|
||||
}
|
||||
}
|
||||
|
||||
// Encode writes a TOML representation of the Go value to the underlying
|
||||
// io.Writer. If the value given cannot be encoded to a valid TOML document,
|
||||
// then an error is returned.
|
||||
//
|
||||
// The mapping between Go values and TOML values should be precisely the same
|
||||
// as for the Decode* functions. Similarly, the TextMarshaler interface is
|
||||
// supported by encoding the resulting bytes as strings. (If you want to write
|
||||
// arbitrary binary data then you will need to use something like base64 since
|
||||
// TOML does not have any binary types.)
|
||||
//
|
||||
// When encoding TOML hashes (i.e., Go maps or structs), keys without any
|
||||
// sub-hashes are encoded first.
|
||||
//
|
||||
// If a Go map is encoded, then its keys are sorted alphabetically for
|
||||
// deterministic output. More control over this behavior may be provided if
|
||||
// there is demand for it.
|
||||
//
|
||||
// Encoding Go values without a corresponding TOML representation---like map
|
||||
// types with non-string keys---will cause an error to be returned. Similarly
|
||||
// for mixed arrays/slices, arrays/slices with nil elements, embedded
|
||||
// non-struct types and nested slices containing maps or structs.
|
||||
// (e.g., [][]map[string]string is not allowed but []map[string]string is OK
|
||||
// and so is []map[string][]string.)
|
||||
func (enc *Encoder) Encode(v interface{}) error {
|
||||
rv := eindirect(reflect.ValueOf(v))
|
||||
if err := enc.safeEncode(Key([]string{}), rv); err != nil {
|
||||
return err
|
||||
}
|
||||
return enc.w.Flush()
|
||||
}
|
||||
|
||||
func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
if terr, ok := r.(tomlEncodeError); ok {
|
||||
err = terr.error
|
||||
return
|
||||
}
|
||||
panic(r)
|
||||
}
|
||||
}()
|
||||
enc.encode(key, rv)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (enc *Encoder) encode(key Key, rv reflect.Value) {
|
||||
// Special case. Time needs to be in ISO8601 format.
|
||||
// Special case. If we can marshal the type to text, then we used that.
|
||||
// Basically, this prevents the encoder for handling these types as
|
||||
// generic structs (or whatever the underlying type of a TextMarshaler is).
|
||||
switch rv.Interface().(type) {
|
||||
case time.Time, TextMarshaler:
|
||||
enc.keyEqElement(key, rv)
|
||||
return
|
||||
}
|
||||
|
||||
k := rv.Kind()
|
||||
switch k {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
||||
reflect.Uint64,
|
||||
reflect.Float32, reflect.Float64, reflect.String, reflect.Bool:
|
||||
enc.keyEqElement(key, rv)
|
||||
case reflect.Array, reflect.Slice:
|
||||
if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) {
|
||||
enc.eArrayOfTables(key, rv)
|
||||
} else {
|
||||
enc.keyEqElement(key, rv)
|
||||
}
|
||||
case reflect.Interface:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.encode(key, rv.Elem())
|
||||
case reflect.Map:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.eTable(key, rv)
|
||||
case reflect.Ptr:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.encode(key, rv.Elem())
|
||||
case reflect.Struct:
|
||||
enc.eTable(key, rv)
|
||||
default:
|
||||
panic(e("unsupported type for key '%s': %s", key, k))
|
||||
}
|
||||
}
|
||||
|
||||
// eElement encodes any value that can be an array element (primitives and
|
||||
// arrays).
|
||||
func (enc *Encoder) eElement(rv reflect.Value) {
|
||||
switch v := rv.Interface().(type) {
|
||||
case time.Time:
|
||||
// Special case time.Time as a primitive. Has to come before
|
||||
// TextMarshaler below because time.Time implements
|
||||
// encoding.TextMarshaler, but we need to always use UTC.
|
||||
enc.wf(v.UTC().Format("2006-01-02T15:04:05Z"))
|
||||
return
|
||||
case TextMarshaler:
|
||||
// Special case. Use text marshaler if it's available for this value.
|
||||
if s, err := v.MarshalText(); err != nil {
|
||||
encPanic(err)
|
||||
} else {
|
||||
enc.writeQuoted(string(s))
|
||||
}
|
||||
return
|
||||
}
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
enc.wf(strconv.FormatBool(rv.Bool()))
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64:
|
||||
enc.wf(strconv.FormatInt(rv.Int(), 10))
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16,
|
||||
reflect.Uint32, reflect.Uint64:
|
||||
enc.wf(strconv.FormatUint(rv.Uint(), 10))
|
||||
case reflect.Float32:
|
||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32)))
|
||||
case reflect.Float64:
|
||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64)))
|
||||
case reflect.Array, reflect.Slice:
|
||||
enc.eArrayOrSliceElement(rv)
|
||||
case reflect.Interface:
|
||||
enc.eElement(rv.Elem())
|
||||
case reflect.String:
|
||||
enc.writeQuoted(rv.String())
|
||||
default:
|
||||
panic(e("unexpected primitive type: %s", rv.Kind()))
|
||||
}
|
||||
}
|
||||
|
||||
// By the TOML spec, all floats must have a decimal with at least one
|
||||
// number on either side.
|
||||
func floatAddDecimal(fstr string) string {
|
||||
if !strings.Contains(fstr, ".") {
|
||||
return fstr + ".0"
|
||||
}
|
||||
return fstr
|
||||
}
|
||||
|
||||
func (enc *Encoder) writeQuoted(s string) {
|
||||
enc.wf("\"%s\"", quotedReplacer.Replace(s))
|
||||
}
|
||||
|
||||
func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) {
|
||||
length := rv.Len()
|
||||
enc.wf("[")
|
||||
for i := 0; i < length; i++ {
|
||||
elem := rv.Index(i)
|
||||
enc.eElement(elem)
|
||||
if i != length-1 {
|
||||
enc.wf(", ")
|
||||
}
|
||||
}
|
||||
enc.wf("]")
|
||||
}
|
||||
|
||||
func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) {
|
||||
if len(key) == 0 {
|
||||
encPanic(errNoKey)
|
||||
}
|
||||
for i := 0; i < rv.Len(); i++ {
|
||||
trv := rv.Index(i)
|
||||
if isNil(trv) {
|
||||
continue
|
||||
}
|
||||
panicIfInvalidKey(key)
|
||||
enc.newline()
|
||||
enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll())
|
||||
enc.newline()
|
||||
enc.eMapOrStruct(key, trv)
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) eTable(key Key, rv reflect.Value) {
|
||||
panicIfInvalidKey(key)
|
||||
if len(key) == 1 {
|
||||
// Output an extra newline between top-level tables.
|
||||
// (The newline isn't written if nothing else has been written though.)
|
||||
enc.newline()
|
||||
}
|
||||
if len(key) > 0 {
|
||||
enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll())
|
||||
enc.newline()
|
||||
}
|
||||
enc.eMapOrStruct(key, rv)
|
||||
}
|
||||
|
||||
func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) {
|
||||
switch rv := eindirect(rv); rv.Kind() {
|
||||
case reflect.Map:
|
||||
enc.eMap(key, rv)
|
||||
case reflect.Struct:
|
||||
enc.eStruct(key, rv)
|
||||
default:
|
||||
panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String())
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) eMap(key Key, rv reflect.Value) {
|
||||
rt := rv.Type()
|
||||
if rt.Key().Kind() != reflect.String {
|
||||
encPanic(errNonString)
|
||||
}
|
||||
|
||||
// Sort keys so that we have deterministic output. And write keys directly
|
||||
// underneath this key first, before writing sub-structs or sub-maps.
|
||||
var mapKeysDirect, mapKeysSub []string
|
||||
for _, mapKey := range rv.MapKeys() {
|
||||
k := mapKey.String()
|
||||
if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) {
|
||||
mapKeysSub = append(mapKeysSub, k)
|
||||
} else {
|
||||
mapKeysDirect = append(mapKeysDirect, k)
|
||||
}
|
||||
}
|
||||
|
||||
var writeMapKeys = func(mapKeys []string) {
|
||||
sort.Strings(mapKeys)
|
||||
for _, mapKey := range mapKeys {
|
||||
mrv := rv.MapIndex(reflect.ValueOf(mapKey))
|
||||
if isNil(mrv) {
|
||||
// Don't write anything for nil fields.
|
||||
continue
|
||||
}
|
||||
enc.encode(key.add(mapKey), mrv)
|
||||
}
|
||||
}
|
||||
writeMapKeys(mapKeysDirect)
|
||||
writeMapKeys(mapKeysSub)
|
||||
}
|
||||
|
||||
func (enc *Encoder) eStruct(key Key, rv reflect.Value) {
|
||||
// Write keys for fields directly under this key first, because if we write
|
||||
// a field that creates a new table, then all keys under it will be in that
|
||||
// table (not the one we're writing here).
|
||||
rt := rv.Type()
|
||||
var fieldsDirect, fieldsSub [][]int
|
||||
var addFields func(rt reflect.Type, rv reflect.Value, start []int)
|
||||
addFields = func(rt reflect.Type, rv reflect.Value, start []int) {
|
||||
for i := 0; i < rt.NumField(); i++ {
|
||||
f := rt.Field(i)
|
||||
// skip unexported fields
|
||||
if f.PkgPath != "" && !f.Anonymous {
|
||||
continue
|
||||
}
|
||||
frv := rv.Field(i)
|
||||
if f.Anonymous {
|
||||
t := f.Type
|
||||
switch t.Kind() {
|
||||
case reflect.Struct:
|
||||
// Treat anonymous struct fields with
|
||||
// tag names as though they are not
|
||||
// anonymous, like encoding/json does.
|
||||
if getOptions(f.Tag).name == "" {
|
||||
addFields(t, frv, f.Index)
|
||||
continue
|
||||
}
|
||||
case reflect.Ptr:
|
||||
if t.Elem().Kind() == reflect.Struct &&
|
||||
getOptions(f.Tag).name == "" {
|
||||
if !frv.IsNil() {
|
||||
addFields(t.Elem(), frv.Elem(), f.Index)
|
||||
}
|
||||
continue
|
||||
}
|
||||
// Fall through to the normal field encoding logic below
|
||||
// for non-struct anonymous fields.
|
||||
}
|
||||
}
|
||||
|
||||
if typeIsHash(tomlTypeOfGo(frv)) {
|
||||
fieldsSub = append(fieldsSub, append(start, f.Index...))
|
||||
} else {
|
||||
fieldsDirect = append(fieldsDirect, append(start, f.Index...))
|
||||
}
|
||||
}
|
||||
}
|
||||
addFields(rt, rv, nil)
|
||||
|
||||
var writeFields = func(fields [][]int) {
|
||||
for _, fieldIndex := range fields {
|
||||
sft := rt.FieldByIndex(fieldIndex)
|
||||
sf := rv.FieldByIndex(fieldIndex)
|
||||
if isNil(sf) {
|
||||
// Don't write anything for nil fields.
|
||||
continue
|
||||
}
|
||||
|
||||
opts := getOptions(sft.Tag)
|
||||
if opts.skip {
|
||||
continue
|
||||
}
|
||||
keyName := sft.Name
|
||||
if opts.name != "" {
|
||||
keyName = opts.name
|
||||
}
|
||||
if opts.omitempty && isEmpty(sf) {
|
||||
continue
|
||||
}
|
||||
if opts.omitzero && isZero(sf) {
|
||||
continue
|
||||
}
|
||||
|
||||
enc.encode(key.add(keyName), sf)
|
||||
}
|
||||
}
|
||||
writeFields(fieldsDirect)
|
||||
writeFields(fieldsSub)
|
||||
}
|
||||
|
||||
// tomlTypeName returns the TOML type name of the Go value's type. It is
|
||||
// used to determine whether the types of array elements are mixed (which is
|
||||
// forbidden). If the Go value is nil, then it is illegal for it to be an array
|
||||
// element, and valueIsNil is returned as true.
|
||||
|
||||
// Returns the TOML type of a Go value. The type may be `nil`, which means
|
||||
// no concrete TOML type could be found.
|
||||
func tomlTypeOfGo(rv reflect.Value) tomlType {
|
||||
if isNil(rv) || !rv.IsValid() {
|
||||
return nil
|
||||
}
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
return tomlBool
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
||||
reflect.Uint64:
|
||||
return tomlInteger
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return tomlFloat
|
||||
case reflect.Array, reflect.Slice:
|
||||
if typeEqual(tomlHash, tomlArrayType(rv)) {
|
||||
return tomlArrayHash
|
||||
}
|
||||
return tomlArray
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
return tomlTypeOfGo(rv.Elem())
|
||||
case reflect.String:
|
||||
return tomlString
|
||||
case reflect.Map:
|
||||
return tomlHash
|
||||
case reflect.Struct:
|
||||
switch rv.Interface().(type) {
|
||||
case time.Time:
|
||||
return tomlDatetime
|
||||
case TextMarshaler:
|
||||
return tomlString
|
||||
default:
|
||||
return tomlHash
|
||||
}
|
||||
default:
|
||||
panic("unexpected reflect.Kind: " + rv.Kind().String())
|
||||
}
|
||||
}
|
||||
|
||||
// tomlArrayType returns the element type of a TOML array. The type returned
|
||||
// may be nil if it cannot be determined (e.g., a nil slice or a zero length
|
||||
// slize). This function may also panic if it finds a type that cannot be
|
||||
// expressed in TOML (such as nil elements, heterogeneous arrays or directly
|
||||
// nested arrays of tables).
|
||||
func tomlArrayType(rv reflect.Value) tomlType {
|
||||
if isNil(rv) || !rv.IsValid() || rv.Len() == 0 {
|
||||
return nil
|
||||
}
|
||||
firstType := tomlTypeOfGo(rv.Index(0))
|
||||
if firstType == nil {
|
||||
encPanic(errArrayNilElement)
|
||||
}
|
||||
|
||||
rvlen := rv.Len()
|
||||
for i := 1; i < rvlen; i++ {
|
||||
elem := rv.Index(i)
|
||||
switch elemType := tomlTypeOfGo(elem); {
|
||||
case elemType == nil:
|
||||
encPanic(errArrayNilElement)
|
||||
case !typeEqual(firstType, elemType):
|
||||
encPanic(errArrayMixedElementTypes)
|
||||
}
|
||||
}
|
||||
// If we have a nested array, then we must make sure that the nested
|
||||
// array contains ONLY primitives.
|
||||
// This checks arbitrarily nested arrays.
|
||||
if typeEqual(firstType, tomlArray) || typeEqual(firstType, tomlArrayHash) {
|
||||
nest := tomlArrayType(eindirect(rv.Index(0)))
|
||||
if typeEqual(nest, tomlHash) || typeEqual(nest, tomlArrayHash) {
|
||||
encPanic(errArrayNoTable)
|
||||
}
|
||||
}
|
||||
return firstType
|
||||
}
|
||||
|
||||
type tagOptions struct {
|
||||
skip bool // "-"
|
||||
name string
|
||||
omitempty bool
|
||||
omitzero bool
|
||||
}
|
||||
|
||||
func getOptions(tag reflect.StructTag) tagOptions {
|
||||
t := tag.Get("toml")
|
||||
if t == "-" {
|
||||
return tagOptions{skip: true}
|
||||
}
|
||||
var opts tagOptions
|
||||
parts := strings.Split(t, ",")
|
||||
opts.name = parts[0]
|
||||
for _, s := range parts[1:] {
|
||||
switch s {
|
||||
case "omitempty":
|
||||
opts.omitempty = true
|
||||
case "omitzero":
|
||||
opts.omitzero = true
|
||||
}
|
||||
}
|
||||
return opts
|
||||
}
|
||||
|
||||
func isZero(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return rv.Int() == 0
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
return rv.Uint() == 0
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return rv.Float() == 0.0
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isEmpty(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Array, reflect.Slice, reflect.Map, reflect.String:
|
||||
return rv.Len() == 0
|
||||
case reflect.Bool:
|
||||
return !rv.Bool()
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (enc *Encoder) newline() {
|
||||
if enc.hasWritten {
|
||||
enc.wf("\n")
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) keyEqElement(key Key, val reflect.Value) {
|
||||
if len(key) == 0 {
|
||||
encPanic(errNoKey)
|
||||
}
|
||||
panicIfInvalidKey(key)
|
||||
enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1))
|
||||
enc.eElement(val)
|
||||
enc.newline()
|
||||
}
|
||||
|
||||
func (enc *Encoder) wf(format string, v ...interface{}) {
|
||||
if _, err := fmt.Fprintf(enc.w, format, v...); err != nil {
|
||||
encPanic(err)
|
||||
}
|
||||
enc.hasWritten = true
|
||||
}
|
||||
|
||||
func (enc *Encoder) indentStr(key Key) string {
|
||||
return strings.Repeat(enc.Indent, len(key)-1)
|
||||
}
|
||||
|
||||
func encPanic(err error) {
|
||||
panic(tomlEncodeError{err})
|
||||
}
|
||||
|
||||
func eindirect(v reflect.Value) reflect.Value {
|
||||
switch v.Kind() {
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
return eindirect(v.Elem())
|
||||
default:
|
||||
return v
|
||||
}
|
||||
}
|
||||
|
||||
func isNil(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
|
||||
return rv.IsNil()
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func panicIfInvalidKey(key Key) {
|
||||
for _, k := range key {
|
||||
if len(k) == 0 {
|
||||
encPanic(e("Key '%s' is not a valid table name. Key names "+
|
||||
"cannot be empty.", key.maybeQuotedAll()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func isValidKeyName(s string) bool {
|
||||
return len(s) != 0
|
||||
}
|
19  vendor/github.com/BurntSushi/toml/encoding_types.go  generated  vendored  Normal file

@@ -0,0 +1,19 @@
// +build go1.2

package toml

// In order to support Go 1.1, we define our own TextMarshaler and
// TextUnmarshaler types. For Go 1.2+, we just alias them with the
// standard library interfaces.

import (
	"encoding"
)

// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
// so that Go 1.1 can be supported.
type TextMarshaler encoding.TextMarshaler

// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined
// here so that Go 1.1 can be supported.
type TextUnmarshaler encoding.TextUnmarshaler
18  vendor/github.com/BurntSushi/toml/encoding_types_1.1.go  generated  vendored  Normal file

@@ -0,0 +1,18 @@
// +build !go1.2

package toml

// These interfaces were introduced in Go 1.2, so we add them manually when
// compiling for Go 1.1.

// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
// so that Go 1.1 can be supported.
type TextMarshaler interface {
	MarshalText() (text []byte, err error)
}

// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined
// here so that Go 1.1 can be supported.
type TextUnmarshaler interface {
	UnmarshalText(text []byte) error
}
953  vendor/github.com/BurntSushi/toml/lex.go  generated  vendored  Normal file

@@ -0,0 +1,953 @@
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type itemType int
|
||||
|
||||
const (
|
||||
itemError itemType = iota
|
||||
itemNIL // used in the parser to indicate no type
|
||||
itemEOF
|
||||
itemText
|
||||
itemString
|
||||
itemRawString
|
||||
itemMultilineString
|
||||
itemRawMultilineString
|
||||
itemBool
|
||||
itemInteger
|
||||
itemFloat
|
||||
itemDatetime
|
||||
itemArray // the start of an array
|
||||
itemArrayEnd
|
||||
itemTableStart
|
||||
itemTableEnd
|
||||
itemArrayTableStart
|
||||
itemArrayTableEnd
|
||||
itemKeyStart
|
||||
itemCommentStart
|
||||
itemInlineTableStart
|
||||
itemInlineTableEnd
|
||||
)
|
||||
|
||||
const (
|
||||
eof = 0
|
||||
comma = ','
|
||||
tableStart = '['
|
||||
tableEnd = ']'
|
||||
arrayTableStart = '['
|
||||
arrayTableEnd = ']'
|
||||
tableSep = '.'
|
||||
keySep = '='
|
||||
arrayStart = '['
|
||||
arrayEnd = ']'
|
||||
commentStart = '#'
|
||||
stringStart = '"'
|
||||
stringEnd = '"'
|
||||
rawStringStart = '\''
|
||||
rawStringEnd = '\''
|
||||
inlineTableStart = '{'
|
||||
inlineTableEnd = '}'
|
||||
)
|
||||
|
||||
type stateFn func(lx *lexer) stateFn
|
||||
|
||||
type lexer struct {
|
||||
input string
|
||||
start int
|
||||
pos int
|
||||
line int
|
||||
state stateFn
|
||||
items chan item
|
||||
|
||||
// Allow for backing up up to three runes.
|
||||
// This is necessary because TOML contains 3-rune tokens (""" and ''').
|
||||
prevWidths [3]int
|
||||
nprev int // how many of prevWidths are in use
|
||||
// If we emit an eof, we can still back up, but it is not OK to call
|
||||
// next again.
|
||||
atEOF bool
|
||||
|
||||
// A stack of state functions used to maintain context.
|
||||
// The idea is to reuse parts of the state machine in various places.
|
||||
// For example, values can appear at the top level or within arbitrarily
|
||||
// nested arrays. The last state on the stack is used after a value has
|
||||
// been lexed. Similarly for comments.
|
||||
stack []stateFn
|
||||
}
|
||||
|
||||
type item struct {
|
||||
typ itemType
|
||||
val string
|
||||
line int
|
||||
}
|
||||
|
||||
func (lx *lexer) nextItem() item {
|
||||
for {
|
||||
select {
|
||||
case item := <-lx.items:
|
||||
return item
|
||||
default:
|
||||
lx.state = lx.state(lx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func lex(input string) *lexer {
|
||||
lx := &lexer{
|
||||
input: input,
|
||||
state: lexTop,
|
||||
line: 1,
|
||||
items: make(chan item, 10),
|
||||
stack: make([]stateFn, 0, 10),
|
||||
}
|
||||
return lx
|
||||
}
|
||||
|
||||
func (lx *lexer) push(state stateFn) {
|
||||
lx.stack = append(lx.stack, state)
|
||||
}
|
||||
|
||||
func (lx *lexer) pop() stateFn {
|
||||
if len(lx.stack) == 0 {
|
||||
return lx.errorf("BUG in lexer: no states to pop")
|
||||
}
|
||||
last := lx.stack[len(lx.stack)-1]
|
||||
lx.stack = lx.stack[0 : len(lx.stack)-1]
|
||||
return last
|
||||
}
|
||||
|
||||
func (lx *lexer) current() string {
|
||||
return lx.input[lx.start:lx.pos]
|
||||
}
|
||||
|
||||
func (lx *lexer) emit(typ itemType) {
|
||||
lx.items <- item{typ, lx.current(), lx.line}
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
func (lx *lexer) emitTrim(typ itemType) {
|
||||
lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line}
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
func (lx *lexer) next() (r rune) {
|
||||
if lx.atEOF {
|
||||
panic("next called after EOF")
|
||||
}
|
||||
if lx.pos >= len(lx.input) {
|
||||
lx.atEOF = true
|
||||
return eof
|
||||
}
|
||||
|
||||
if lx.input[lx.pos] == '\n' {
|
||||
lx.line++
|
||||
}
|
||||
lx.prevWidths[2] = lx.prevWidths[1]
|
||||
lx.prevWidths[1] = lx.prevWidths[0]
|
||||
if lx.nprev < 3 {
|
||||
lx.nprev++
|
||||
}
|
||||
r, w := utf8.DecodeRuneInString(lx.input[lx.pos:])
|
||||
lx.prevWidths[0] = w
|
||||
lx.pos += w
|
||||
return r
|
||||
}
|
||||
|
||||
// ignore skips over the pending input before this point.
|
||||
func (lx *lexer) ignore() {
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
// backup steps back one rune. Can be called only twice between calls to next.
|
||||
func (lx *lexer) backup() {
|
||||
if lx.atEOF {
|
||||
lx.atEOF = false
|
||||
return
|
||||
}
|
||||
if lx.nprev < 1 {
|
||||
panic("backed up too far")
|
||||
}
|
||||
w := lx.prevWidths[0]
|
||||
lx.prevWidths[0] = lx.prevWidths[1]
|
||||
lx.prevWidths[1] = lx.prevWidths[2]
|
||||
lx.nprev--
|
||||
lx.pos -= w
|
||||
if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' {
|
||||
lx.line--
|
||||
}
|
||||
}
|
||||
|
||||
// accept consumes the next rune if it's equal to `valid`.
|
||||
func (lx *lexer) accept(valid rune) bool {
|
||||
if lx.next() == valid {
|
||||
return true
|
||||
}
|
||||
lx.backup()
|
||||
return false
|
||||
}
|
||||
|
||||
// peek returns but does not consume the next rune in the input.
|
||||
func (lx *lexer) peek() rune {
|
||||
r := lx.next()
|
||||
lx.backup()
|
||||
return r
|
||||
}
|
||||
|
||||
// skip ignores all input that matches the given predicate.
|
||||
func (lx *lexer) skip(pred func(rune) bool) {
|
||||
for {
|
||||
r := lx.next()
|
||||
if pred(r) {
|
||||
continue
|
||||
}
|
||||
lx.backup()
|
||||
lx.ignore()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// errorf stops all lexing by emitting an error and returning `nil`.
|
||||
// Note that any value that is a character is escaped if it's a special
|
||||
// character (newlines, tabs, etc.).
|
||||
func (lx *lexer) errorf(format string, values ...interface{}) stateFn {
|
||||
lx.items <- item{
|
||||
itemError,
|
||||
fmt.Sprintf(format, values...),
|
||||
lx.line,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// lexTop consumes elements at the top level of TOML data.
|
||||
func lexTop(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isWhitespace(r) || isNL(r) {
|
||||
return lexSkip(lx, lexTop)
|
||||
}
|
||||
switch r {
|
||||
case commentStart:
|
||||
lx.push(lexTop)
|
||||
return lexCommentStart
|
||||
case tableStart:
|
||||
return lexTableStart
|
||||
case eof:
|
||||
if lx.pos > lx.start {
|
||||
return lx.errorf("unexpected EOF")
|
||||
}
|
||||
lx.emit(itemEOF)
|
||||
return nil
|
||||
}
|
||||
|
||||
// At this point, the only valid item can be a key, so we back up
|
||||
// and let the key lexer do the rest.
|
||||
lx.backup()
|
||||
lx.push(lexTopEnd)
|
||||
return lexKeyStart
|
||||
}
|
||||
|
||||
// lexTopEnd is entered whenever a top-level item has been consumed. (A value
|
||||
// or a table.) It must see only whitespace, and will turn back to lexTop
|
||||
// upon a newline. If it sees EOF, it will quit the lexer successfully.
|
||||
func lexTopEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == commentStart:
|
||||
// a comment will read to a newline for us.
|
||||
lx.push(lexTop)
|
||||
return lexCommentStart
|
||||
case isWhitespace(r):
|
||||
return lexTopEnd
|
||||
case isNL(r):
|
||||
lx.ignore()
|
||||
return lexTop
|
||||
case r == eof:
|
||||
lx.emit(itemEOF)
|
||||
return nil
|
||||
}
|
||||
return lx.errorf("expected a top-level item to end with a newline, "+
|
||||
"comment, or EOF, but got %q instead", r)
|
||||
}
|
||||
|
||||
// lexTableStart lexes the beginning of a table. Namely, it makes sure that
|
||||
// it starts with a character other than '.' and ']'.
|
||||
// It assumes that '[' has already been consumed.
|
||||
// It also handles the case that this is an item in an array of tables.
|
||||
// e.g., '[[name]]'.
|
||||
func lexTableStart(lx *lexer) stateFn {
|
||||
if lx.peek() == arrayTableStart {
|
||||
lx.next()
|
||||
lx.emit(itemArrayTableStart)
|
||||
lx.push(lexArrayTableEnd)
|
||||
} else {
|
||||
lx.emit(itemTableStart)
|
||||
lx.push(lexTableEnd)
|
||||
}
|
||||
return lexTableNameStart
|
||||
}
|
||||
|
||||
func lexTableEnd(lx *lexer) stateFn {
|
||||
lx.emit(itemTableEnd)
|
||||
return lexTopEnd
|
||||
}
|
||||
|
||||
func lexArrayTableEnd(lx *lexer) stateFn {
|
||||
if r := lx.next(); r != arrayTableEnd {
|
||||
return lx.errorf("expected end of table array name delimiter %q, "+
|
||||
"but got %q instead", arrayTableEnd, r)
|
||||
}
|
||||
lx.emit(itemArrayTableEnd)
|
||||
return lexTopEnd
|
||||
}
|
||||
|
||||
func lexTableNameStart(lx *lexer) stateFn {
|
||||
lx.skip(isWhitespace)
|
||||
switch r := lx.peek(); {
|
||||
case r == tableEnd || r == eof:
|
||||
return lx.errorf("unexpected end of table name " +
|
||||
"(table names cannot be empty)")
|
||||
case r == tableSep:
|
||||
return lx.errorf("unexpected table separator " +
|
||||
"(table names cannot be empty)")
|
||||
case r == stringStart || r == rawStringStart:
|
||||
lx.ignore()
|
||||
lx.push(lexTableNameEnd)
|
||||
return lexValue // reuse string lexing
|
||||
default:
|
||||
return lexBareTableName
|
||||
}
|
||||
}
|
||||
|
||||
// lexBareTableName lexes the name of a table. It assumes that at least one
|
||||
// valid character for the table has already been read.
|
||||
func lexBareTableName(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isBareKeyChar(r) {
|
||||
return lexBareTableName
|
||||
}
|
||||
lx.backup()
|
||||
lx.emit(itemText)
|
||||
return lexTableNameEnd
|
||||
}
|
||||
|
||||
// lexTableNameEnd reads the end of a piece of a table name, optionally
|
||||
// consuming whitespace.
|
||||
func lexTableNameEnd(lx *lexer) stateFn {
|
||||
lx.skip(isWhitespace)
|
||||
switch r := lx.next(); {
|
||||
case isWhitespace(r):
|
||||
return lexTableNameEnd
|
||||
case r == tableSep:
|
||||
lx.ignore()
|
||||
return lexTableNameStart
|
||||
case r == tableEnd:
|
||||
return lx.pop()
|
||||
default:
|
||||
return lx.errorf("expected '.' or ']' to end table name, "+
|
||||
"but got %q instead", r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexKeyStart consumes a key name up until the first non-whitespace character.
|
||||
// lexKeyStart will ignore whitespace.
|
||||
func lexKeyStart(lx *lexer) stateFn {
|
||||
r := lx.peek()
|
||||
switch {
|
||||
case r == keySep:
|
||||
return lx.errorf("unexpected key separator %q", keySep)
|
||||
case isWhitespace(r) || isNL(r):
|
||||
lx.next()
|
||||
return lexSkip(lx, lexKeyStart)
|
||||
case r == stringStart || r == rawStringStart:
|
||||
lx.ignore()
|
||||
lx.emit(itemKeyStart)
|
||||
lx.push(lexKeyEnd)
|
||||
return lexValue // reuse string lexing
|
||||
default:
|
||||
lx.ignore()
|
||||
lx.emit(itemKeyStart)
|
||||
return lexBareKey
|
||||
}
|
||||
}
|
||||
|
||||
// lexBareKey consumes the text of a bare key. Assumes that the first character
|
||||
// (which is not whitespace) has not yet been consumed.
|
||||
func lexBareKey(lx *lexer) stateFn {
|
||||
switch r := lx.next(); {
|
||||
case isBareKeyChar(r):
|
||||
return lexBareKey
|
||||
case isWhitespace(r):
|
||||
lx.backup()
|
||||
lx.emit(itemText)
|
||||
return lexKeyEnd
|
||||
case r == keySep:
|
||||
lx.backup()
|
||||
lx.emit(itemText)
|
||||
return lexKeyEnd
|
||||
default:
|
||||
return lx.errorf("bare keys cannot contain %q", r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexKeyEnd consumes the end of a key and trims whitespace (up to the key
|
||||
// separator).
|
||||
func lexKeyEnd(lx *lexer) stateFn {
|
||||
switch r := lx.next(); {
|
||||
case r == keySep:
|
||||
return lexSkip(lx, lexValue)
|
||||
case isWhitespace(r):
|
||||
return lexSkip(lx, lexKeyEnd)
|
||||
default:
|
||||
return lx.errorf("expected key separator %q, but got %q instead",
|
||||
keySep, r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexValue starts the consumption of a value anywhere a value is expected.
|
||||
// lexValue will ignore whitespace.
|
||||
// After a value is lexed, the last state on the stack is popped and returned.
|
||||
func lexValue(lx *lexer) stateFn {
|
||||
// We allow whitespace to precede a value, but NOT newlines.
|
||||
// In array syntax, the array states are responsible for ignoring newlines.
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r):
|
||||
return lexSkip(lx, lexValue)
|
||||
case isDigit(r):
|
||||
lx.backup() // avoid an extra state and use the same as above
|
||||
return lexNumberOrDateStart
|
||||
}
|
||||
switch r {
|
||||
case arrayStart:
|
||||
lx.ignore()
|
||||
lx.emit(itemArray)
|
||||
return lexArrayValue
|
||||
case inlineTableStart:
|
||||
lx.ignore()
|
||||
lx.emit(itemInlineTableStart)
|
||||
return lexInlineTableValue
|
||||
case stringStart:
|
||||
if lx.accept(stringStart) {
|
||||
if lx.accept(stringStart) {
|
||||
lx.ignore() // Ignore """
|
||||
return lexMultilineString
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
lx.ignore() // ignore the '"'
|
||||
return lexString
|
||||
case rawStringStart:
|
||||
if lx.accept(rawStringStart) {
|
||||
if lx.accept(rawStringStart) {
|
||||
lx.ignore() // Ignore """
|
||||
return lexMultilineRawString
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
lx.ignore() // ignore the "'"
|
||||
return lexRawString
|
||||
case '+', '-':
|
||||
return lexNumberStart
|
||||
case '.': // special error case, be kind to users
|
||||
return lx.errorf("floats must start with a digit, not '.'")
|
||||
}
|
||||
if unicode.IsLetter(r) {
|
||||
// Be permissive here; lexBool will give a nice error if the
|
||||
// user wrote something like
|
||||
// x = foo
|
||||
// (i.e. not 'true' or 'false' but is something else word-like.)
|
||||
lx.backup()
|
||||
return lexBool
|
||||
}
|
||||
return lx.errorf("expected value but found %q instead", r)
|
||||
}
|
||||
|
||||
// lexArrayValue consumes one value in an array. It assumes that '[' or ','
|
||||
// have already been consumed. All whitespace and newlines are ignored.
|
||||
func lexArrayValue(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r) || isNL(r):
|
||||
return lexSkip(lx, lexArrayValue)
|
||||
case r == commentStart:
|
||||
lx.push(lexArrayValue)
|
||||
return lexCommentStart
|
||||
case r == comma:
|
||||
return lx.errorf("unexpected comma")
|
||||
case r == arrayEnd:
|
||||
// NOTE(caleb): The spec isn't clear about whether you can have
|
||||
// a trailing comma or not, so we'll allow it.
|
||||
return lexArrayEnd
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.push(lexArrayValueEnd)
|
||||
return lexValue
|
||||
}
|
||||
|
||||
// lexArrayValueEnd consumes everything between the end of an array value and
|
||||
// the next value (or the end of the array): it ignores whitespace and newlines
|
||||
// and expects either a ',' or a ']'.
|
||||
func lexArrayValueEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r) || isNL(r):
|
||||
return lexSkip(lx, lexArrayValueEnd)
|
||||
case r == commentStart:
|
||||
lx.push(lexArrayValueEnd)
|
||||
return lexCommentStart
|
||||
case r == comma:
|
||||
lx.ignore()
|
||||
return lexArrayValue // move on to the next value
|
||||
case r == arrayEnd:
|
||||
return lexArrayEnd
|
||||
}
|
||||
return lx.errorf(
|
||||
"expected a comma or array terminator %q, but got %q instead",
|
||||
arrayEnd, r,
|
||||
)
|
||||
}
|
||||
|
||||
// lexArrayEnd finishes the lexing of an array.
|
||||
// It assumes that a ']' has just been consumed.
|
||||
func lexArrayEnd(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
lx.emit(itemArrayEnd)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexInlineTableValue consumes one key/value pair in an inline table.
|
||||
// It assumes that '{' or ',' have already been consumed. Whitespace is ignored.
|
||||
func lexInlineTableValue(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r):
|
||||
return lexSkip(lx, lexInlineTableValue)
|
||||
case isNL(r):
|
||||
return lx.errorf("newlines not allowed within inline tables")
|
||||
case r == commentStart:
|
||||
lx.push(lexInlineTableValue)
|
||||
return lexCommentStart
|
||||
case r == comma:
|
||||
return lx.errorf("unexpected comma")
|
||||
case r == inlineTableEnd:
|
||||
return lexInlineTableEnd
|
||||
}
|
||||
lx.backup()
|
||||
lx.push(lexInlineTableValueEnd)
|
||||
return lexKeyStart
|
||||
}
|
||||
|
||||
// lexInlineTableValueEnd consumes everything between the end of an inline table
|
||||
// key/value pair and the next pair (or the end of the table):
|
||||
// it ignores whitespace and expects either a ',' or a '}'.
|
||||
func lexInlineTableValueEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r):
|
||||
return lexSkip(lx, lexInlineTableValueEnd)
|
||||
case isNL(r):
|
||||
return lx.errorf("newlines not allowed within inline tables")
|
||||
case r == commentStart:
|
||||
lx.push(lexInlineTableValueEnd)
|
||||
return lexCommentStart
|
||||
case r == comma:
|
||||
lx.ignore()
|
||||
return lexInlineTableValue
|
||||
case r == inlineTableEnd:
|
||||
return lexInlineTableEnd
|
||||
}
|
||||
return lx.errorf("expected a comma or an inline table terminator %q, "+
|
||||
"but got %q instead", inlineTableEnd, r)
|
||||
}
|
||||
|
||||
// lexInlineTableEnd finishes the lexing of an inline table.
|
||||
// It assumes that a '}' has just been consumed.
|
||||
func lexInlineTableEnd(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
lx.emit(itemInlineTableEnd)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexString consumes the inner contents of a string. It assumes that the
|
||||
// beginning '"' has already been consumed and ignored.
|
||||
func lexString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == eof:
|
||||
return lx.errorf("unexpected EOF")
|
||||
case isNL(r):
|
||||
return lx.errorf("strings cannot contain newlines")
|
||||
case r == '\\':
|
||||
lx.push(lexString)
|
||||
return lexStringEscape
|
||||
case r == stringEnd:
|
||||
lx.backup()
|
||||
lx.emit(itemString)
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
return lexString
|
||||
}
|
||||
|
||||
// lexMultilineString consumes the inner contents of a string. It assumes that
|
||||
// the beginning '"""' has already been consumed and ignored.
|
||||
func lexMultilineString(lx *lexer) stateFn {
|
||||
switch lx.next() {
|
||||
case eof:
|
||||
return lx.errorf("unexpected EOF")
|
||||
case '\\':
|
||||
return lexMultilineStringEscape
|
||||
case stringEnd:
|
||||
if lx.accept(stringEnd) {
|
||||
if lx.accept(stringEnd) {
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.emit(itemMultilineString)
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
}
|
||||
return lexMultilineString
|
||||
}
|
||||
|
||||
// lexRawString consumes a raw string. Nothing can be escaped in such a string.
|
||||
// It assumes that the beginning "'" has already been consumed and ignored.
|
||||
func lexRawString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == eof:
|
||||
return lx.errorf("unexpected EOF")
|
||||
case isNL(r):
|
||||
return lx.errorf("strings cannot contain newlines")
|
||||
case r == rawStringEnd:
|
||||
lx.backup()
|
||||
lx.emit(itemRawString)
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
return lexRawString
|
||||
}
|
||||
|
||||
// lexMultilineRawString consumes a raw string. Nothing can be escaped in such
|
||||
// a string. It assumes that the beginning "'''" has already been consumed and
|
||||
// ignored.
|
||||
func lexMultilineRawString(lx *lexer) stateFn {
|
||||
switch lx.next() {
|
||||
case eof:
|
||||
return lx.errorf("unexpected EOF")
|
||||
case rawStringEnd:
|
||||
if lx.accept(rawStringEnd) {
|
||||
if lx.accept(rawStringEnd) {
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.emit(itemRawMultilineString)
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
}
|
||||
return lexMultilineRawString
|
||||
}
|
||||
|
||||
// lexMultilineStringEscape consumes an escaped character. It assumes that the
|
||||
// preceding '\\' has already been consumed.
|
||||
func lexMultilineStringEscape(lx *lexer) stateFn {
|
||||
// Handle the special case first:
|
||||
if isNL(lx.next()) {
|
||||
return lexMultilineString
|
||||
}
|
||||
lx.backup()
|
||||
lx.push(lexMultilineString)
|
||||
return lexStringEscape(lx)
|
||||
}
|
||||
|
||||
func lexStringEscape(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch r {
|
||||
case 'b':
|
||||
fallthrough
|
||||
case 't':
|
||||
fallthrough
|
||||
case 'n':
|
||||
fallthrough
|
||||
case 'f':
|
||||
fallthrough
|
||||
case 'r':
|
||||
fallthrough
|
||||
case '"':
|
||||
fallthrough
|
||||
case '\\':
|
||||
return lx.pop()
|
||||
case 'u':
|
||||
return lexShortUnicodeEscape
|
||||
case 'U':
|
||||
return lexLongUnicodeEscape
|
||||
}
|
||||
return lx.errorf("invalid escape character %q; only the following "+
|
||||
"escape characters are allowed: "+
|
||||
`\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX`, r)
|
||||
}
|
||||
|
||||
func lexShortUnicodeEscape(lx *lexer) stateFn {
|
||||
var r rune
|
||||
for i := 0; i < 4; i++ {
|
||||
r = lx.next()
|
||||
if !isHexadecimal(r) {
|
||||
return lx.errorf(`expected four hexadecimal digits after '\u', `+
|
||||
"but got %q instead", lx.current())
|
||||
}
|
||||
}
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
func lexLongUnicodeEscape(lx *lexer) stateFn {
|
||||
var r rune
|
||||
for i := 0; i < 8; i++ {
|
||||
r = lx.next()
|
||||
if !isHexadecimal(r) {
|
||||
return lx.errorf(`expected eight hexadecimal digits after '\U', `+
|
||||
"but got %q instead", lx.current())
|
||||
}
|
||||
}
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexNumberOrDateStart consumes either an integer, a float, or datetime.
|
||||
func lexNumberOrDateStart(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isDigit(r) {
|
||||
return lexNumberOrDate
|
||||
}
|
||||
switch r {
|
||||
case '_':
|
||||
return lexNumber
|
||||
case 'e', 'E':
|
||||
return lexFloat
|
||||
case '.':
|
||||
return lx.errorf("floats must start with a digit, not '.'")
|
||||
}
|
||||
return lx.errorf("expected a digit but got %q", r)
|
||||
}
|
||||
|
||||
// lexNumberOrDate consumes either an integer, float or datetime.
|
||||
func lexNumberOrDate(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isDigit(r) {
|
||||
return lexNumberOrDate
|
||||
}
|
||||
switch r {
|
||||
case '-':
|
||||
return lexDatetime
|
||||
case '_':
|
||||
return lexNumber
|
||||
case '.', 'e', 'E':
|
||||
return lexFloat
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemInteger)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexDatetime consumes a Datetime, to a first approximation.
|
||||
// The parser validates that it matches one of the accepted formats.
|
||||
func lexDatetime(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isDigit(r) {
|
||||
return lexDatetime
|
||||
}
|
||||
switch r {
|
||||
case '-', 'T', ':', '.', 'Z', '+':
|
||||
return lexDatetime
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemDatetime)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexNumberStart consumes either an integer or a float. It assumes that a sign
|
||||
// has already been read, but that *no* digits have been consumed.
|
||||
// lexNumberStart will move to the appropriate integer or float states.
|
||||
func lexNumberStart(lx *lexer) stateFn {
|
||||
// We MUST see a digit. Even floats have to start with a digit.
|
||||
r := lx.next()
|
||||
if !isDigit(r) {
|
||||
if r == '.' {
|
||||
return lx.errorf("floats must start with a digit, not '.'")
|
||||
}
|
||||
return lx.errorf("expected a digit but got %q", r)
|
||||
}
|
||||
return lexNumber
|
||||
}
|
||||
|
||||
// lexNumber consumes an integer or a float after seeing the first digit.
|
||||
func lexNumber(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isDigit(r) {
|
||||
return lexNumber
|
||||
}
|
||||
switch r {
|
||||
case '_':
|
||||
return lexNumber
|
||||
case '.', 'e', 'E':
|
||||
return lexFloat
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemInteger)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexFloat consumes the elements of a float. It allows any sequence of
|
||||
// float-like characters, so floats emitted by the lexer are only a first
|
||||
// approximation and must be validated by the parser.
|
||||
func lexFloat(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isDigit(r) {
|
||||
return lexFloat
|
||||
}
|
||||
switch r {
|
||||
case '_', '.', '-', '+', 'e', 'E':
|
||||
return lexFloat
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemFloat)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexBool consumes a bool string: 'true' or 'false'.
|
||||
func lexBool(lx *lexer) stateFn {
|
||||
var rs []rune
|
||||
for {
|
||||
r := lx.next()
|
||||
if !unicode.IsLetter(r) {
|
||||
lx.backup()
|
||||
break
|
||||
}
|
||||
rs = append(rs, r)
|
||||
}
|
||||
s := string(rs)
|
||||
switch s {
|
||||
case "true", "false":
|
||||
lx.emit(itemBool)
|
||||
return lx.pop()
|
||||
}
|
||||
return lx.errorf("expected value but found %q instead", s)
|
||||
}
|
||||
|
||||
// lexCommentStart begins the lexing of a comment. It will emit
|
||||
// itemCommentStart and consume no characters, passing control to lexComment.
|
||||
func lexCommentStart(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
lx.emit(itemCommentStart)
|
||||
return lexComment
|
||||
}
|
||||
|
||||
// lexComment lexes an entire comment. It assumes that '#' has been consumed.
|
||||
// It will consume *up to* the first newline character, and pass control
|
||||
// back to the last state on the stack.
|
||||
func lexComment(lx *lexer) stateFn {
|
||||
r := lx.peek()
|
||||
if isNL(r) || r == eof {
|
||||
lx.emit(itemText)
|
||||
return lx.pop()
|
||||
}
|
||||
lx.next()
|
||||
return lexComment
|
||||
}
|
||||
|
||||
// lexSkip ignores all slurped input and moves on to the next state.
|
||||
func lexSkip(lx *lexer, nextState stateFn) stateFn {
|
||||
return func(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
return nextState
|
||||
}
|
||||
}
|
||||
|
||||
// isWhitespace returns true if `r` is a whitespace character according
|
||||
// to the spec.
|
||||
func isWhitespace(r rune) bool {
|
||||
return r == '\t' || r == ' '
|
||||
}
|
||||
|
||||
func isNL(r rune) bool {
|
||||
return r == '\n' || r == '\r'
|
||||
}
|
||||
|
||||
func isDigit(r rune) bool {
|
||||
return r >= '0' && r <= '9'
|
||||
}
|
||||
|
||||
func isHexadecimal(r rune) bool {
|
||||
return (r >= '0' && r <= '9') ||
|
||||
(r >= 'a' && r <= 'f') ||
|
||||
(r >= 'A' && r <= 'F')
|
||||
}
|
||||
|
||||
func isBareKeyChar(r rune) bool {
|
||||
return (r >= 'A' && r <= 'Z') ||
|
||||
(r >= 'a' && r <= 'z') ||
|
||||
(r >= '0' && r <= '9') ||
|
||||
r == '_' ||
|
||||
r == '-'
|
||||
}
|
||||
|
||||
func (itype itemType) String() string {
|
||||
switch itype {
|
||||
case itemError:
|
||||
return "Error"
|
||||
case itemNIL:
|
||||
return "NIL"
|
||||
case itemEOF:
|
||||
return "EOF"
|
||||
case itemText:
|
||||
return "Text"
|
||||
case itemString, itemRawString, itemMultilineString, itemRawMultilineString:
|
||||
return "String"
|
||||
case itemBool:
|
||||
return "Bool"
|
||||
case itemInteger:
|
||||
return "Integer"
|
||||
case itemFloat:
|
||||
return "Float"
|
||||
case itemDatetime:
|
||||
return "DateTime"
|
||||
case itemTableStart:
|
||||
return "TableStart"
|
||||
case itemTableEnd:
|
||||
return "TableEnd"
|
||||
case itemKeyStart:
|
||||
return "KeyStart"
|
||||
case itemArray:
|
||||
return "Array"
|
||||
case itemArrayEnd:
|
||||
return "ArrayEnd"
|
||||
case itemCommentStart:
|
||||
return "CommentStart"
|
||||
}
|
||||
panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype)))
|
||||
}
|
||||
|
||||
func (item item) String() string {
|
||||
return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val)
|
||||
}
|
592 vendor/github.com/BurntSushi/toml/parse.go generated vendored Normal file
@@ -0,0 +1,592 @@
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type parser struct {
|
||||
mapping map[string]interface{}
|
||||
types map[string]tomlType
|
||||
lx *lexer
|
||||
|
||||
// A list of keys in the order that they appear in the TOML data.
|
||||
ordered []Key
|
||||
|
||||
// the full key for the current hash in scope
|
||||
context Key
|
||||
|
||||
// the base key name for everything except hashes
|
||||
currentKey string
|
||||
|
||||
// rough approximation of line number
|
||||
approxLine int
|
||||
|
||||
// A map of 'key.group.names' to whether they were created implicitly.
|
||||
implicits map[string]bool
|
||||
}
|
||||
|
||||
type parseError string
|
||||
|
||||
func (pe parseError) Error() string {
|
||||
return string(pe)
|
||||
}
|
||||
|
||||
func parse(data string) (p *parser, err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
var ok bool
|
||||
if err, ok = r.(parseError); ok {
|
||||
return
|
||||
}
|
||||
panic(r)
|
||||
}
|
||||
}()
|
||||
|
||||
p = &parser{
|
||||
mapping: make(map[string]interface{}),
|
||||
types: make(map[string]tomlType),
|
||||
lx: lex(data),
|
||||
ordered: make([]Key, 0),
|
||||
implicits: make(map[string]bool),
|
||||
}
|
||||
for {
|
||||
item := p.next()
|
||||
if item.typ == itemEOF {
|
||||
break
|
||||
}
|
||||
p.topLevel(item)
|
||||
}
|
||||
|
||||
return p, nil
|
||||
}
|
||||
|
||||
func (p *parser) panicf(format string, v ...interface{}) {
|
||||
msg := fmt.Sprintf("Near line %d (last key parsed '%s'): %s",
|
||||
p.approxLine, p.current(), fmt.Sprintf(format, v...))
|
||||
panic(parseError(msg))
|
||||
}
|
||||
|
||||
func (p *parser) next() item {
|
||||
it := p.lx.nextItem()
|
||||
if it.typ == itemError {
|
||||
p.panicf("%s", it.val)
|
||||
}
|
||||
return it
|
||||
}
|
||||
|
||||
func (p *parser) bug(format string, v ...interface{}) {
|
||||
panic(fmt.Sprintf("BUG: "+format+"\n\n", v...))
|
||||
}
|
||||
|
||||
func (p *parser) expect(typ itemType) item {
|
||||
it := p.next()
|
||||
p.assertEqual(typ, it.typ)
|
||||
return it
|
||||
}
|
||||
|
||||
func (p *parser) assertEqual(expected, got itemType) {
|
||||
if expected != got {
|
||||
p.bug("Expected '%s' but got '%s'.", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) topLevel(item item) {
|
||||
switch item.typ {
|
||||
case itemCommentStart:
|
||||
p.approxLine = item.line
|
||||
p.expect(itemText)
|
||||
case itemTableStart:
|
||||
kg := p.next()
|
||||
p.approxLine = kg.line
|
||||
|
||||
var key Key
|
||||
for ; kg.typ != itemTableEnd && kg.typ != itemEOF; kg = p.next() {
|
||||
key = append(key, p.keyString(kg))
|
||||
}
|
||||
p.assertEqual(itemTableEnd, kg.typ)
|
||||
|
||||
p.establishContext(key, false)
|
||||
p.setType("", tomlHash)
|
||||
p.ordered = append(p.ordered, key)
|
||||
case itemArrayTableStart:
|
||||
kg := p.next()
|
||||
p.approxLine = kg.line
|
||||
|
||||
var key Key
|
||||
for ; kg.typ != itemArrayTableEnd && kg.typ != itemEOF; kg = p.next() {
|
||||
key = append(key, p.keyString(kg))
|
||||
}
|
||||
p.assertEqual(itemArrayTableEnd, kg.typ)
|
||||
|
||||
p.establishContext(key, true)
|
||||
p.setType("", tomlArrayHash)
|
||||
p.ordered = append(p.ordered, key)
|
||||
case itemKeyStart:
|
||||
kname := p.next()
|
||||
p.approxLine = kname.line
|
||||
p.currentKey = p.keyString(kname)
|
||||
|
||||
val, typ := p.value(p.next())
|
||||
p.setValue(p.currentKey, val)
|
||||
p.setType(p.currentKey, typ)
|
||||
p.ordered = append(p.ordered, p.context.add(p.currentKey))
|
||||
p.currentKey = ""
|
||||
default:
|
||||
p.bug("Unexpected type at top level: %s", item.typ)
|
||||
}
|
||||
}
|
||||
|
||||
// Gets a string for a key (or part of a key in a table name).
|
||||
func (p *parser) keyString(it item) string {
|
||||
switch it.typ {
|
||||
case itemText:
|
||||
return it.val
|
||||
case itemString, itemMultilineString,
|
||||
itemRawString, itemRawMultilineString:
|
||||
s, _ := p.value(it)
|
||||
return s.(string)
|
||||
default:
|
||||
p.bug("Unexpected key type: %s", it.typ)
|
||||
panic("unreachable")
|
||||
}
|
||||
}
|
||||
|
||||
// value translates an expected value from the lexer into a Go value wrapped
|
||||
// as an empty interface.
|
||||
func (p *parser) value(it item) (interface{}, tomlType) {
|
||||
switch it.typ {
|
||||
case itemString:
|
||||
return p.replaceEscapes(it.val), p.typeOfPrimitive(it)
|
||||
case itemMultilineString:
|
||||
trimmed := stripFirstNewline(stripEscapedWhitespace(it.val))
|
||||
return p.replaceEscapes(trimmed), p.typeOfPrimitive(it)
|
||||
case itemRawString:
|
||||
return it.val, p.typeOfPrimitive(it)
|
||||
case itemRawMultilineString:
|
||||
return stripFirstNewline(it.val), p.typeOfPrimitive(it)
|
||||
case itemBool:
|
||||
switch it.val {
|
||||
case "true":
|
||||
return true, p.typeOfPrimitive(it)
|
||||
case "false":
|
||||
return false, p.typeOfPrimitive(it)
|
||||
}
|
||||
p.bug("Expected boolean value, but got '%s'.", it.val)
|
||||
case itemInteger:
|
||||
if !numUnderscoresOK(it.val) {
|
||||
p.panicf("Invalid integer %q: underscores must be surrounded by digits",
|
||||
it.val)
|
||||
}
|
||||
val := strings.Replace(it.val, "_", "", -1)
|
||||
num, err := strconv.ParseInt(val, 10, 64)
|
||||
if err != nil {
|
||||
// Distinguish integer values. Normally, it'd be a bug if the lexer
|
||||
// provides an invalid integer, but it's possible that the number is
|
||||
// out of range of valid values (which the lexer cannot determine).
|
||||
// So mark the former as a bug but the latter as a legitimate user
|
||||
// error.
|
||||
if e, ok := err.(*strconv.NumError); ok &&
|
||||
e.Err == strconv.ErrRange {
|
||||
|
||||
p.panicf("Integer '%s' is out of the range of 64-bit "+
|
||||
"signed integers.", it.val)
|
||||
} else {
|
||||
p.bug("Expected integer value, but got '%s'.", it.val)
|
||||
}
|
||||
}
|
||||
return num, p.typeOfPrimitive(it)
|
||||
case itemFloat:
|
||||
parts := strings.FieldsFunc(it.val, func(r rune) bool {
|
||||
switch r {
|
||||
case '.', 'e', 'E':
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
for _, part := range parts {
|
||||
if !numUnderscoresOK(part) {
|
||||
p.panicf("Invalid float %q: underscores must be "+
|
||||
"surrounded by digits", it.val)
|
||||
}
|
||||
}
|
||||
if !numPeriodsOK(it.val) {
|
||||
// As a special case, numbers like '123.' or '1.e2',
|
||||
// which are valid as far as Go/strconv are concerned,
|
||||
// must be rejected because TOML says that a fractional
|
||||
// part consists of '.' followed by 1+ digits.
|
||||
p.panicf("Invalid float %q: '.' must be followed "+
|
||||
"by one or more digits", it.val)
|
||||
}
|
||||
val := strings.Replace(it.val, "_", "", -1)
|
||||
num, err := strconv.ParseFloat(val, 64)
|
||||
if err != nil {
|
||||
if e, ok := err.(*strconv.NumError); ok &&
|
||||
e.Err == strconv.ErrRange {
|
||||
|
||||
p.panicf("Float '%s' is out of the range of 64-bit "+
|
||||
"IEEE-754 floating-point numbers.", it.val)
|
||||
} else {
|
||||
p.panicf("Invalid float value: %q", it.val)
|
||||
}
|
||||
}
|
||||
return num, p.typeOfPrimitive(it)
|
||||
case itemDatetime:
|
||||
var t time.Time
|
||||
var ok bool
|
||||
var err error
|
||||
for _, format := range []string{
|
||||
"2006-01-02T15:04:05Z07:00",
|
||||
"2006-01-02T15:04:05",
|
||||
"2006-01-02",
|
||||
} {
|
||||
t, err = time.ParseInLocation(format, it.val, time.Local)
|
||||
if err == nil {
|
||||
ok = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !ok {
|
||||
p.panicf("Invalid TOML Datetime: %q.", it.val)
|
||||
}
|
||||
return t, p.typeOfPrimitive(it)
|
||||
case itemArray:
|
||||
array := make([]interface{}, 0)
|
||||
types := make([]tomlType, 0)
|
||||
|
||||
for it = p.next(); it.typ != itemArrayEnd; it = p.next() {
|
||||
if it.typ == itemCommentStart {
|
||||
p.expect(itemText)
|
||||
continue
|
||||
}
|
||||
|
||||
val, typ := p.value(it)
|
||||
array = append(array, val)
|
||||
types = append(types, typ)
|
||||
}
|
||||
return array, p.typeOfArray(types)
|
||||
case itemInlineTableStart:
|
||||
var (
|
||||
hash = make(map[string]interface{})
|
||||
outerContext = p.context
|
||||
outerKey = p.currentKey
|
||||
)
|
||||
|
||||
p.context = append(p.context, p.currentKey)
|
||||
p.currentKey = ""
|
||||
for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() {
|
||||
if it.typ != itemKeyStart {
|
||||
p.bug("Expected key start but instead found %q, around line %d",
|
||||
it.val, p.approxLine)
|
||||
}
|
||||
if it.typ == itemCommentStart {
|
||||
p.expect(itemText)
|
||||
continue
|
||||
}
|
||||
|
||||
// retrieve key
|
||||
k := p.next()
|
||||
p.approxLine = k.line
|
||||
kname := p.keyString(k)
|
||||
|
||||
// retrieve value
|
||||
p.currentKey = kname
|
||||
val, typ := p.value(p.next())
|
||||
// make sure we keep metadata up to date
|
||||
p.setType(kname, typ)
|
||||
p.ordered = append(p.ordered, p.context.add(p.currentKey))
|
||||
hash[kname] = val
|
||||
}
|
||||
p.context = outerContext
|
||||
p.currentKey = outerKey
|
||||
return hash, tomlHash
|
||||
}
|
||||
p.bug("Unexpected value type: %s", it.typ)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// numUnderscoresOK checks whether each underscore in s is surrounded by
|
||||
// characters that are not underscores.
|
||||
func numUnderscoresOK(s string) bool {
|
||||
accept := false
|
||||
for _, r := range s {
|
||||
if r == '_' {
|
||||
if !accept {
|
||||
return false
|
||||
}
|
||||
accept = false
|
||||
continue
|
||||
}
|
||||
accept = true
|
||||
}
|
||||
return accept
|
||||
}
|
||||
|
||||
// numPeriodsOK checks whether every period in s is followed by a digit.
|
||||
func numPeriodsOK(s string) bool {
|
||||
period := false
|
||||
for _, r := range s {
|
||||
if period && !isDigit(r) {
|
||||
return false
|
||||
}
|
||||
period = r == '.'
|
||||
}
|
||||
return !period
|
||||
}
|
||||
|
||||
// establishContext sets the current context of the parser,
|
||||
// where the context is either a hash or an array of hashes. Which one is
|
||||
// set depends on the value of the `array` parameter.
|
||||
//
|
||||
// Establishing the context also makes sure that the key isn't a duplicate, and
|
||||
// will create implicit hashes automatically.
|
||||
func (p *parser) establishContext(key Key, array bool) {
|
||||
var ok bool
|
||||
|
||||
// Always start at the top level and drill down for our context.
|
||||
hashContext := p.mapping
|
||||
keyContext := make(Key, 0)
|
||||
|
||||
// We only need implicit hashes for key[0:-1]
|
||||
for _, k := range key[0 : len(key)-1] {
|
||||
_, ok = hashContext[k]
|
||||
keyContext = append(keyContext, k)
|
||||
|
||||
// No key? Make an implicit hash and move on.
|
||||
if !ok {
|
||||
p.addImplicit(keyContext)
|
||||
hashContext[k] = make(map[string]interface{})
|
||||
}
|
||||
|
||||
// If the hash context is actually an array of tables, then set
|
||||
// the hash context to the last element in that array.
|
||||
//
|
||||
// Otherwise, it better be a table, since this MUST be a key group (by
|
||||
// virtue of it not being the last element in a key).
|
||||
switch t := hashContext[k].(type) {
|
||||
case []map[string]interface{}:
|
||||
hashContext = t[len(t)-1]
|
||||
case map[string]interface{}:
|
||||
hashContext = t
|
||||
default:
|
||||
p.panicf("Key '%s' was already created as a hash.", keyContext)
|
||||
}
|
||||
}
|
||||
|
||||
p.context = keyContext
|
||||
if array {
|
||||
// If this is the first element for this array, then allocate a new
|
||||
// list of tables for it.
|
||||
k := key[len(key)-1]
|
||||
if _, ok := hashContext[k]; !ok {
|
||||
hashContext[k] = make([]map[string]interface{}, 0, 5)
|
||||
}
|
||||
|
||||
// Add a new table. But make sure the key hasn't already been used
|
||||
// for something else.
|
||||
if hash, ok := hashContext[k].([]map[string]interface{}); ok {
|
||||
hashContext[k] = append(hash, make(map[string]interface{}))
|
||||
} else {
|
||||
p.panicf("Key '%s' was already created and cannot be used as "+
|
||||
"an array.", keyContext)
|
||||
}
|
||||
} else {
|
||||
p.setValue(key[len(key)-1], make(map[string]interface{}))
|
||||
}
|
||||
p.context = append(p.context, key[len(key)-1])
|
||||
}
|
||||
|
||||
// setValue sets the given key to the given value in the current context.
|
||||
// It will make sure that the key hasn't already been defined, and will account for
|
||||
// implicit key groups.
|
||||
func (p *parser) setValue(key string, value interface{}) {
|
||||
var tmpHash interface{}
|
||||
var ok bool
|
||||
|
||||
hash := p.mapping
|
||||
keyContext := make(Key, 0)
|
||||
for _, k := range p.context {
|
||||
keyContext = append(keyContext, k)
|
||||
if tmpHash, ok = hash[k]; !ok {
|
||||
p.bug("Context for key '%s' has not been established.", keyContext)
|
||||
}
|
||||
switch t := tmpHash.(type) {
|
||||
case []map[string]interface{}:
|
||||
// The context is a table of hashes. Pick the most recent table
|
||||
// defined as the current hash.
|
||||
hash = t[len(t)-1]
|
||||
case map[string]interface{}:
|
||||
hash = t
|
||||
default:
|
||||
p.bug("Expected hash to have type 'map[string]interface{}', but "+
|
||||
"it has '%T' instead.", tmpHash)
|
||||
}
|
||||
}
|
||||
keyContext = append(keyContext, key)
|
||||
|
||||
if _, ok := hash[key]; ok {
|
||||
// Typically, if the given key has already been set, then we have
|
||||
// to raise an error since duplicate keys are disallowed. However,
|
||||
// it's possible that a key was previously defined implicitly. In this
|
||||
// case, it is allowed to be redefined concretely. (See the
|
||||
// `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.)
|
||||
//
|
||||
// But we have to make sure to stop marking it as an implicit. (So that
|
||||
// another redefinition provokes an error.)
|
||||
//
|
||||
// Note that since it has already been defined (as a hash), we don't
|
||||
// want to overwrite it. So our business is done.
|
||||
if p.isImplicit(keyContext) {
|
||||
p.removeImplicit(keyContext)
|
||||
return
|
||||
}
|
||||
|
||||
// Otherwise, we have a concrete key trying to override a previous
|
||||
// key, which is *always* wrong.
|
||||
p.panicf("Key '%s' has already been defined.", keyContext)
|
||||
}
|
||||
hash[key] = value
|
||||
}
|
||||
|
||||
// setType sets the type of a particular value at a given key.
|
||||
// It should be called immediately AFTER setValue.
|
||||
//
|
||||
// Note that if `key` is empty, then the type given will be applied to the
|
||||
// current context (which is either a table or an array of tables).
|
||||
func (p *parser) setType(key string, typ tomlType) {
|
||||
keyContext := make(Key, 0, len(p.context)+1)
|
||||
for _, k := range p.context {
|
||||
keyContext = append(keyContext, k)
|
||||
}
|
||||
if len(key) > 0 { // allow type setting for hashes
|
||||
keyContext = append(keyContext, key)
|
||||
}
|
||||
p.types[keyContext.String()] = typ
|
||||
}
|
||||
|
||||
// addImplicit sets the given Key as having been created implicitly.
|
||||
func (p *parser) addImplicit(key Key) {
|
||||
p.implicits[key.String()] = true
|
||||
}
|
||||
|
||||
// removeImplicit stops tagging the given key as having been implicitly
|
||||
// created.
|
||||
func (p *parser) removeImplicit(key Key) {
|
||||
p.implicits[key.String()] = false
|
||||
}
|
||||
|
||||
// isImplicit returns true if the key group pointed to by the key was created
|
||||
// implicitly.
|
||||
func (p *parser) isImplicit(key Key) bool {
|
||||
return p.implicits[key.String()]
|
||||
}
|
||||
|
||||
// current returns the full key name of the current context.
|
||||
func (p *parser) current() string {
|
||||
if len(p.currentKey) == 0 {
|
||||
return p.context.String()
|
||||
}
|
||||
if len(p.context) == 0 {
|
||||
return p.currentKey
|
||||
}
|
||||
return fmt.Sprintf("%s.%s", p.context, p.currentKey)
|
||||
}
|
||||
|
||||
func stripFirstNewline(s string) string {
|
||||
if len(s) == 0 || s[0] != '\n' {
|
||||
return s
|
||||
}
|
||||
return s[1:]
|
||||
}
|
||||
|
||||
func stripEscapedWhitespace(s string) string {
|
||||
esc := strings.Split(s, "\\\n")
|
||||
if len(esc) > 1 {
|
||||
for i := 1; i < len(esc); i++ {
|
||||
esc[i] = strings.TrimLeftFunc(esc[i], unicode.IsSpace)
|
||||
}
|
||||
}
|
||||
return strings.Join(esc, "")
|
||||
}
|
||||
|
||||
func (p *parser) replaceEscapes(str string) string {
|
||||
var replaced []rune
|
||||
s := []byte(str)
|
||||
r := 0
|
||||
for r < len(s) {
|
||||
if s[r] != '\\' {
|
||||
c, size := utf8.DecodeRune(s[r:])
|
||||
r += size
|
||||
replaced = append(replaced, c)
|
||||
continue
|
||||
}
|
||||
r += 1
|
||||
if r >= len(s) {
|
||||
p.bug("Escape sequence at end of string.")
|
||||
return ""
|
||||
}
|
||||
switch s[r] {
|
||||
default:
|
||||
p.bug("Expected valid escape code after \\, but got %q.", s[r])
|
||||
return ""
|
||||
case 'b':
|
||||
replaced = append(replaced, rune(0x0008))
|
||||
r += 1
|
||||
case 't':
|
||||
replaced = append(replaced, rune(0x0009))
|
||||
r += 1
|
||||
case 'n':
|
||||
replaced = append(replaced, rune(0x000A))
|
||||
r += 1
|
||||
case 'f':
|
||||
replaced = append(replaced, rune(0x000C))
|
||||
r += 1
|
||||
case 'r':
|
||||
replaced = append(replaced, rune(0x000D))
|
||||
r += 1
|
||||
case '"':
|
||||
replaced = append(replaced, rune(0x0022))
|
||||
r += 1
|
||||
case '\\':
|
||||
replaced = append(replaced, rune(0x005C))
|
||||
r += 1
|
||||
case 'u':
|
||||
// At this point, we know we have a Unicode escape of the form
|
||||
// `uXXXX` at [r, r+5). (Because the lexer guarantees this
|
||||
// for us.)
|
||||
escaped := p.asciiEscapeToUnicode(s[r+1 : r+5])
|
||||
replaced = append(replaced, escaped)
|
||||
r += 5
|
||||
case 'U':
|
||||
// At this point, we know we have a Unicode escape of the form
|
||||
// `UXXXXXXXX` at [r, r+9). (Because the lexer guarantees this
|
||||
// for us.)
|
||||
escaped := p.asciiEscapeToUnicode(s[r+1 : r+9])
|
||||
replaced = append(replaced, escaped)
|
||||
r += 9
|
||||
}
|
||||
}
|
||||
return string(replaced)
|
||||
}
|
||||
|
||||
func (p *parser) asciiEscapeToUnicode(bs []byte) rune {
|
||||
s := string(bs)
|
||||
hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32)
|
||||
if err != nil {
|
||||
p.bug("Could not parse '%s' as a hexadecimal number, but the "+
|
||||
"lexer claims it's OK: %s", s, err)
|
||||
}
|
||||
if !utf8.ValidRune(rune(hex)) {
|
||||
p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s)
|
||||
}
|
||||
return rune(hex)
|
||||
}
|
||||
|
||||
func isStringType(ty itemType) bool {
|
||||
return ty == itemString || ty == itemMultilineString ||
|
||||
ty == itemRawString || ty == itemRawMultilineString
|
||||
}
|
1 vendor/github.com/BurntSushi/toml/session.vim generated vendored Normal file
@@ -0,0 +1 @@
au BufWritePost *.go silent!make tags > /dev/null 2>&1
91 vendor/github.com/BurntSushi/toml/type_check.go generated vendored Normal file
@@ -0,0 +1,91 @@
package toml
|
||||
|
||||
// tomlType represents any Go type that corresponds to a TOML type.
|
||||
// While the first draft of the TOML spec has a simplistic type system that
|
||||
// probably doesn't need this level of sophistication, we seem to be militating
|
||||
// toward adding real composite types.
|
||||
type tomlType interface {
|
||||
typeString() string
|
||||
}
|
||||
|
||||
// typeEqual accepts any two types and returns true if they are equal.
|
||||
func typeEqual(t1, t2 tomlType) bool {
|
||||
if t1 == nil || t2 == nil {
|
||||
return false
|
||||
}
|
||||
return t1.typeString() == t2.typeString()
|
||||
}
|
||||
|
||||
func typeIsHash(t tomlType) bool {
|
||||
return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash)
|
||||
}
|
||||
|
||||
type tomlBaseType string
|
||||
|
||||
func (btype tomlBaseType) typeString() string {
|
||||
return string(btype)
|
||||
}
|
||||
|
||||
func (btype tomlBaseType) String() string {
|
||||
return btype.typeString()
|
||||
}
|
||||
|
||||
var (
|
||||
tomlInteger tomlBaseType = "Integer"
|
||||
tomlFloat tomlBaseType = "Float"
|
||||
tomlDatetime tomlBaseType = "Datetime"
|
||||
tomlString tomlBaseType = "String"
|
||||
tomlBool tomlBaseType = "Bool"
|
||||
tomlArray tomlBaseType = "Array"
|
||||
tomlHash tomlBaseType = "Hash"
|
||||
tomlArrayHash tomlBaseType = "ArrayHash"
|
||||
)
|
||||
|
||||
// typeOfPrimitive returns a tomlType of any primitive value in TOML.
|
||||
// Primitive values are: Integer, Float, Datetime, String and Bool.
|
||||
//
|
||||
// Passing a lexer item other than the following will cause a BUG message
|
||||
// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime.
|
||||
func (p *parser) typeOfPrimitive(lexItem item) tomlType {
|
||||
switch lexItem.typ {
|
||||
case itemInteger:
|
||||
return tomlInteger
|
||||
case itemFloat:
|
||||
return tomlFloat
|
||||
case itemDatetime:
|
||||
return tomlDatetime
|
||||
case itemString:
|
||||
return tomlString
|
||||
case itemMultilineString:
|
||||
return tomlString
|
||||
case itemRawString:
|
||||
return tomlString
|
||||
case itemRawMultilineString:
|
||||
return tomlString
|
||||
case itemBool:
|
||||
return tomlBool
|
||||
}
|
||||
p.bug("Cannot infer primitive type of lex item '%s'.", lexItem)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// typeOfArray returns a tomlType for an array given a list of types of its
|
||||
// values.
|
||||
//
|
||||
// In the current spec, if an array is homogeneous, then its type is always
|
||||
// "Array". If the array is not homogeneous, an error is generated.
|
||||
func (p *parser) typeOfArray(types []tomlType) tomlType {
|
||||
// Empty arrays are cool.
|
||||
if len(types) == 0 {
|
||||
return tomlArray
|
||||
}
|
||||
|
||||
theType := types[0]
|
||||
for _, t := range types[1:] {
|
||||
if !typeEqual(theType, t) {
|
||||
p.panicf("Array contains values of type '%s' and '%s', but "+
|
||||
"arrays must be homogeneous.", theType, t)
|
||||
}
|
||||
}
|
||||
return tomlArray
|
||||
}
|
242 vendor/github.com/BurntSushi/toml/type_fields.go generated vendored Normal file
@@ -0,0 +1,242 @@
package toml
|
||||
|
||||
// Struct field handling is adapted from code in encoding/json:
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the Go distribution.
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sort"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// A field represents a single field found in a struct.
|
||||
type field struct {
|
||||
name string // the name of the field (`toml` tag included)
|
||||
tag bool // whether field has a `toml` tag
|
||||
index []int // represents the depth of an anonymous field
|
||||
typ reflect.Type // the type of the field
|
||||
}
|
||||
|
||||
// byName sorts field by name, breaking ties with depth,
|
||||
// then breaking ties with "name came from toml tag", then
|
||||
// breaking ties with index sequence.
|
||||
type byName []field
|
||||
|
||||
func (x byName) Len() int { return len(x) }
|
||||
|
||||
func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
func (x byName) Less(i, j int) bool {
|
||||
if x[i].name != x[j].name {
|
||||
return x[i].name < x[j].name
|
||||
}
|
||||
if len(x[i].index) != len(x[j].index) {
|
||||
return len(x[i].index) < len(x[j].index)
|
||||
}
|
||||
if x[i].tag != x[j].tag {
|
||||
return x[i].tag
|
||||
}
|
||||
return byIndex(x).Less(i, j)
|
||||
}
|
||||
|
||||
// byIndex sorts field by index sequence.
|
||||
type byIndex []field
|
||||
|
||||
func (x byIndex) Len() int { return len(x) }
|
||||
|
||||
func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
func (x byIndex) Less(i, j int) bool {
|
||||
for k, xik := range x[i].index {
|
||||
if k >= len(x[j].index) {
|
||||
return false
|
||||
}
|
||||
if xik != x[j].index[k] {
|
||||
return xik < x[j].index[k]
|
||||
}
|
||||
}
|
||||
return len(x[i].index) < len(x[j].index)
|
||||
}
|
||||
|
||||
// typeFields returns a list of fields that TOML should recognize for the given
|
||||
// type. The algorithm is breadth-first search over the set of structs to
|
||||
// include - the top struct and then any reachable anonymous structs.
|
||||
func typeFields(t reflect.Type) []field {
|
||||
// Anonymous fields to explore at the current level and the next.
|
||||
current := []field{}
|
||||
next := []field{{typ: t}}
|
||||
|
||||
// Count of queued names for current level and the next.
|
||||
count := map[reflect.Type]int{}
|
||||
nextCount := map[reflect.Type]int{}
|
||||
|
||||
// Types already visited at an earlier level.
|
||||
visited := map[reflect.Type]bool{}
|
||||
|
||||
// Fields found.
|
||||
var fields []field
|
||||
|
||||
for len(next) > 0 {
|
||||
current, next = next, current[:0]
|
||||
count, nextCount = nextCount, map[reflect.Type]int{}
|
||||
|
||||
for _, f := range current {
|
||||
if visited[f.typ] {
|
||||
continue
|
||||
}
|
||||
visited[f.typ] = true
|
||||
|
||||
// Scan f.typ for fields to include.
|
||||
for i := 0; i < f.typ.NumField(); i++ {
|
||||
sf := f.typ.Field(i)
|
||||
if sf.PkgPath != "" && !sf.Anonymous { // unexported
|
||||
continue
|
||||
}
|
||||
opts := getOptions(sf.Tag)
|
||||
if opts.skip {
|
||||
continue
|
||||
}
|
||||
index := make([]int, len(f.index)+1)
|
||||
copy(index, f.index)
|
||||
index[len(f.index)] = i
|
||||
|
||||
ft := sf.Type
|
||||
if ft.Name() == "" && ft.Kind() == reflect.Ptr {
|
||||
// Follow pointer.
|
||||
ft = ft.Elem()
|
||||
}
|
||||
|
||||
// Record found field and index sequence.
|
||||
if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
|
||||
tagged := opts.name != ""
|
||||
name := opts.name
|
||||
if name == "" {
|
||||
name = sf.Name
|
||||
}
|
||||
fields = append(fields, field{name, tagged, index, ft})
|
||||
if count[f.typ] > 1 {
|
||||
// If there were multiple instances, add a second,
|
||||
// so that the annihilation code will see a duplicate.
|
||||
// It only cares about the distinction between 1 or 2,
|
||||
// so don't bother generating any more copies.
|
||||
fields = append(fields, fields[len(fields)-1])
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Record new anonymous struct to explore in next round.
|
||||
nextCount[ft]++
|
||||
if nextCount[ft] == 1 {
|
||||
f := field{name: ft.Name(), index: index, typ: ft}
|
||||
next = append(next, f)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sort.Sort(byName(fields))
|
||||
|
||||
// Delete all fields that are hidden by the Go rules for embedded fields,
|
||||
// except that fields with TOML tags are promoted.
|
||||
|
||||
// The fields are sorted in primary order of name, secondary order
|
||||
// of field index length. Loop over names; for each name, delete
|
||||
// hidden fields by choosing the one dominant field that survives.
|
||||
out := fields[:0]
|
||||
for advance, i := 0, 0; i < len(fields); i += advance {
|
||||
// One iteration per name.
|
||||
// Find the sequence of fields with the name of this first field.
|
||||
fi := fields[i]
|
||||
name := fi.name
|
||||
for advance = 1; i+advance < len(fields); advance++ {
|
||||
fj := fields[i+advance]
|
||||
if fj.name != name {
|
||||
break
|
||||
}
|
||||
}
|
||||
if advance == 1 { // Only one field with this name
|
||||
out = append(out, fi)
|
||||
continue
|
||||
}
|
||||
dominant, ok := dominantField(fields[i : i+advance])
|
||||
if ok {
|
||||
out = append(out, dominant)
|
||||
}
|
||||
}
|
||||
|
||||
fields = out
|
||||
sort.Sort(byIndex(fields))
|
||||
|
||||
return fields
|
||||
}
|
||||
|
||||
// dominantField looks through the fields, all of which are known to
|
||||
// have the same name, to find the single field that dominates the
|
||||
// others using Go's embedding rules, modified by the presence of
|
||||
// TOML tags. If there are multiple top-level fields, the boolean
|
||||
// will be false: This condition is an error in Go and we skip all
|
||||
// the fields.
|
||||
func dominantField(fields []field) (field, bool) {
|
||||
// The fields are sorted in increasing index-length order. The winner
|
||||
// must therefore be one with the shortest index length. Drop all
|
||||
// longer entries, which is easy: just truncate the slice.
|
||||
length := len(fields[0].index)
|
||||
tagged := -1 // Index of first tagged field.
|
||||
for i, f := range fields {
|
||||
if len(f.index) > length {
|
||||
fields = fields[:i]
|
||||
break
|
||||
}
|
||||
if f.tag {
|
||||
if tagged >= 0 {
|
||||
// Multiple tagged fields at the same level: conflict.
|
||||
// Return no field.
|
||||
return field{}, false
|
||||
}
|
||||
tagged = i
|
||||
}
|
||||
}
|
||||
if tagged >= 0 {
|
||||
return fields[tagged], true
|
||||
}
|
||||
// All remaining fields have the same length. If there's more than one,
|
||||
// we have a conflict (two fields named "X" at the same level) and we
|
||||
// return no field.
|
||||
if len(fields) > 1 {
|
||||
return field{}, false
|
||||
}
|
||||
return fields[0], true
|
||||
}
|
||||
|
||||
var fieldCache struct {
|
||||
sync.RWMutex
|
||||
m map[reflect.Type][]field
|
||||
}
|
||||
|
||||
// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
|
||||
func cachedTypeFields(t reflect.Type) []field {
|
||||
fieldCache.RLock()
|
||||
f := fieldCache.m[t]
|
||||
fieldCache.RUnlock()
|
||||
if f != nil {
|
||||
return f
|
||||
}
|
||||
|
||||
// Compute fields without lock.
|
||||
// Might duplicate effort but won't hold other computations back.
|
||||
f = typeFields(t)
|
||||
if f == nil {
|
||||
f = []field{}
|
||||
}
|
||||
|
||||
fieldCache.Lock()
|
||||
if fieldCache.m == nil {
|
||||
fieldCache.m = map[reflect.Type][]field{}
|
||||
}
|
||||
fieldCache.m[t] = f
|
||||
fieldCache.Unlock()
|
||||
return f
|
||||
}
|
40 vendor/github.com/golangci/go-tools/arg/arg.go generated vendored Normal file
@@ -0,0 +1,40 @@
package arg

var args = map[string]int{
	"(*sync.Pool).Put.x": 0,
	"(*text/template.Template).Parse.text": 0,
	"(io.Seeker).Seek.offset": 0,
	"(time.Time).Sub.u": 0,
	"append.elems": 1,
	"append.slice": 0,
	"bytes.Equal.a": 0,
	"bytes.Equal.b": 1,
	"encoding/binary.Write.data": 2,
	"errors.New.text": 0,
	"fmt.Printf.format": 0,
	"fmt.Fprintf.format": 1,
	"fmt.Sprintf.a[0]": 1,
	"fmt.Sprintf.format": 0,
	"len.v": 0,
	"make.size[0]": 1,
	"make.size[1]": 2,
	"make.t": 0,
	"net/url.Parse.rawurl": 0,
	"os.OpenFile.flag": 1,
	"os/exec.Command.name": 0,
	"os/signal.Notify.c": 0,
	"regexp.Compile.expr": 0,
	"runtime.SetFinalizer.finalizer": 1,
	"runtime.SetFinalizer.obj": 0,
	"sort.Sort.data": 0,
	"time.Parse.layout": 0,
	"time.Sleep.d": 0,
}

func Arg(name string) int {
	n, ok := args[name]
	if !ok {
		panic("unknown argument " + name)
	}
	return n
}
@@ -32,7 +32,7 @@ in the call graph; they are treated like built-in operators of the
 language.

 */
-package callgraph // import "github.com/golangci/tools/go/callgraph"
+package callgraph // import "github.com/golangci/go-tools/callgraph"

 // TODO(adonovan): add a function to eliminate wrappers from the
 // callgraph, preserving topology.

@@ -43,7 +43,7 @@ import (
 	"fmt"
 	"go/token"

-	"github.com/golangci/tools/go/ssa"
+	"github.com/golangci/go-tools/ssa"
 )

 // A Graph represents a call graph.

@@ -1,11 +1,11 @@
 // Package static computes the call graph of a Go program containing
 // only static call edges.
-package static // import "github.com/golangci/tools/go/callgraph/static"
+package static // import "github.com/golangci/go-tools/callgraph/static"

 import (
-	"github.com/golangci/tools/go/callgraph"
-	"github.com/golangci/tools/go/ssa"
-	"github.com/golangci/tools/go/ssa/ssautil"
+	"github.com/golangci/go-tools/callgraph"
+	"github.com/golangci/go-tools/ssa"
+	"github.com/golangci/go-tools/ssa/ssautil"
 )

 // CallGraph computes the call graph of the specified program

@@ -4,7 +4,7 @@

 package callgraph

-import "github.com/golangci/tools/go/ssa"
+import "github.com/golangci/go-tools/ssa"

 // This file provides various utilities over call graphs, such as
 // visitation and path search.
vendor/github.com/golangci/go-tools/config/config.go (new file, 162 lines, generated, vendored)
|
@ -0,0 +1,162 @@
|
|||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
)
|
||||
|
||||
func mergeLists(a, b []string) []string {
|
||||
out := make([]string, 0, len(a)+len(b))
|
||||
for _, el := range b {
|
||||
if el == "inherit" {
|
||||
out = append(out, a...)
|
||||
} else {
|
||||
out = append(out, el)
|
||||
}
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
func normalizeList(list []string) []string {
|
||||
if len(list) > 1 {
|
||||
nlist := make([]string, 0, len(list))
|
||||
nlist = append(nlist, list[0])
|
||||
for i, el := range list[1:] {
|
||||
if el != list[i] {
|
||||
nlist = append(nlist, el)
|
||||
}
|
||||
}
|
||||
list = nlist
|
||||
}
|
||||
|
||||
for _, el := range list {
|
||||
if el == "inherit" {
|
||||
// This should never happen, because the default config
|
||||
// should not use "inherit"
|
||||
panic(`unresolved "inherit"`)
|
||||
}
|
||||
}
|
||||
|
||||
return list
|
||||
}
|
||||
|
||||
func (cfg Config) Merge(ocfg Config) Config {
|
||||
if ocfg.Checks != nil {
|
||||
cfg.Checks = mergeLists(cfg.Checks, ocfg.Checks)
|
||||
}
|
||||
if ocfg.Initialisms != nil {
|
||||
cfg.Initialisms = mergeLists(cfg.Initialisms, ocfg.Initialisms)
|
||||
}
|
||||
if ocfg.DotImportWhitelist != nil {
|
||||
cfg.DotImportWhitelist = mergeLists(cfg.DotImportWhitelist, ocfg.DotImportWhitelist)
|
||||
}
|
||||
if ocfg.HTTPStatusCodeWhitelist != nil {
|
||||
cfg.HTTPStatusCodeWhitelist = mergeLists(cfg.HTTPStatusCodeWhitelist, ocfg.HTTPStatusCodeWhitelist)
|
||||
}
|
||||
return cfg
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
// TODO(dh): this implementation makes it impossible for external
|
||||
// clients to add their own checkers with configuration. At the
|
||||
// moment, we don't really care about that; we don't encourage
|
||||
// that people use this package. In the future, we may. The
|
||||
// obvious solution would be using map[string]interface{}, but
|
||||
// that's obviously subpar.
|
||||
|
||||
Checks []string `toml:"checks"`
|
||||
Initialisms []string `toml:"initialisms"`
|
||||
DotImportWhitelist []string `toml:"dot_import_whitelist"`
|
||||
HTTPStatusCodeWhitelist []string `toml:"http_status_code_whitelist"`
|
||||
}
|
||||
|
||||
var defaultConfig = Config{
|
||||
Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016"},
|
||||
Initialisms: []string{
|
||||
"ACL", "API", "ASCII", "CPU", "CSS", "DNS",
|
||||
"EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID",
|
||||
"IP", "JSON", "QPS", "RAM", "RPC", "SLA",
|
||||
"SMTP", "SQL", "SSH", "TCP", "TLS", "TTL",
|
||||
"UDP", "UI", "GID", "UID", "UUID", "URI",
|
||||
"URL", "UTF8", "VM", "XML", "XMPP", "XSRF",
|
||||
"XSS",
|
||||
},
|
||||
DotImportWhitelist: []string{},
|
||||
HTTPStatusCodeWhitelist: []string{"200", "400", "404", "500"},
|
||||
}
|
||||
|
||||
const configName = "staticcheck.conf"
|
||||
|
||||
func parseConfigs(dir string) ([]Config, error) {
|
||||
var out []Config
|
||||
|
||||
// TODO(dh): consider stopping at the GOPATH/module boundary
|
||||
for dir != "" {
|
||||
f, err := os.Open(filepath.Join(dir, configName))
|
||||
if os.IsNotExist(err) {
|
||||
ndir := filepath.Dir(dir)
|
||||
if ndir == dir {
|
||||
break
|
||||
}
|
||||
dir = ndir
|
||||
continue
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var cfg Config
|
||||
_, err = toml.DecodeReader(f, &cfg)
|
||||
f.Close()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
out = append(out, cfg)
|
||||
ndir := filepath.Dir(dir)
|
||||
if ndir == dir {
|
||||
break
|
||||
}
|
||||
dir = ndir
|
||||
}
|
||||
out = append(out, defaultConfig)
|
||||
if len(out) < 2 {
|
||||
return out, nil
|
||||
}
|
||||
for i := 0; i < len(out)/2; i++ {
|
||||
out[i], out[len(out)-1-i] = out[len(out)-1-i], out[i]
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func mergeConfigs(confs []Config) Config {
|
||||
if len(confs) == 0 {
|
||||
// This shouldn't happen because we always have at least a
|
||||
// default config.
|
||||
panic("trying to merge zero configs")
|
||||
}
|
||||
if len(confs) == 1 {
|
||||
return confs[0]
|
||||
}
|
||||
conf := confs[0]
|
||||
for _, oconf := range confs[1:] {
|
||||
conf = conf.Merge(oconf)
|
||||
}
|
||||
return conf
|
||||
}
|
||||
|
||||
func Load(dir string) (Config, error) {
|
||||
confs, err := parseConfigs(dir)
|
||||
if err != nil {
|
||||
return Config{}, err
|
||||
}
|
||||
conf := mergeConfigs(confs)
|
||||
|
||||
conf.Checks = normalizeList(conf.Checks)
|
||||
conf.Initialisms = normalizeList(conf.Initialisms)
|
||||
conf.DotImportWhitelist = normalizeList(conf.DotImportWhitelist)
|
||||
conf.HTTPStatusCodeWhitelist = normalizeList(conf.HTTPStatusCodeWhitelist)
|
||||
|
||||
return conf, nil
|
||||
}
|
vendor/github.com/golangci/go-tools/config/example.conf (new file, 10 lines, generated, vendored)
|
@@ -0,0 +1,10 @@
checks = ["all", "-ST1003", "-ST1014"]
initialisms = ["ACL", "API", "ASCII", "CPU", "CSS", "DNS",
	"EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID",
	"IP", "JSON", "QPS", "RAM", "RPC", "SLA",
	"SMTP", "SQL", "SSH", "TCP", "TLS", "TTL",
	"UDP", "UI", "GID", "UID", "UUID", "URI",
	"URL", "UTF8", "VM", "XML", "XMPP", "XSRF",
	"XSS"]
dot_import_whitelist = []
http_status_code_whitelist = ["200", "400", "404", "500"]
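Together with config.go above, this example config shows the lookup chain: parseConfigs walks from the linted directory up to the filesystem root collecting every staticcheck.conf, appends the built-in defaults, and Merge resolves the literal "inherit" entry by splicing in the parent's list, so a child config such as checks = ["inherit", "-ST1005"] extends rather than replaces what it inherits (ST1005 is only an illustrative exclusion). A small sketch of loading the merged configuration, assuming it runs from a package directory:

```go
package main

import (
	"fmt"

	"github.com/golangci/go-tools/config"
)

func main() {
	// Load parses every staticcheck.conf from "." up to the root, merges the
	// results onto the built-in defaults, and normalizes the lists.
	cfg, err := config.Load(".")
	if err != nil {
		panic(err)
	}
	fmt.Println(cfg.Checks, cfg.HTTPStatusCodeWhitelist)
}
```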
vendor/github.com/golangci/go-tools/functions/concrete.go (2 lines changed, generated, vendored)
|
@ -4,7 +4,7 @@ import (
|
|||
"go/token"
|
||||
"go/types"
|
||||
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
"github.com/golangci/go-tools/ssa"
|
||||
)
|
||||
|
||||
func concreteReturnTypes(fn *ssa.Function) []*types.Tuple {
|
||||
|
|
vendor/github.com/golangci/go-tools/functions/functions.go (6 lines changed, generated, vendored)
|
@ -4,9 +4,9 @@ import (
|
|||
"go/types"
|
||||
"sync"
|
||||
|
||||
"github.com/golangci/tools/go/callgraph"
|
||||
"github.com/golangci/tools/go/callgraph/static"
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
"github.com/golangci/go-tools/callgraph"
|
||||
"github.com/golangci/go-tools/callgraph/static"
|
||||
"github.com/golangci/go-tools/ssa"
|
||||
"github.com/golangci/go-tools/staticcheck/vrp"
|
||||
)
|
||||
|
||||
|
|
vendor/github.com/golangci/go-tools/functions/loops.go (2 lines changed, generated, vendored)
|
@ -1,6 +1,6 @@
|
|||
package functions
|
||||
|
||||
import "github.com/golangci/tools/go/ssa"
|
||||
import "github.com/golangci/go-tools/ssa"
|
||||
|
||||
type Loop map[*ssa.BasicBlock]bool
|
||||
|
||||
|
|
vendor/github.com/golangci/go-tools/functions/pure.go (8 lines changed, generated, vendored)
|
@ -4,9 +4,9 @@ import (
|
|||
"go/token"
|
||||
"go/types"
|
||||
|
||||
"github.com/golangci/tools/go/callgraph"
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
"github.com/golangci/go-tools/lint"
|
||||
"github.com/golangci/go-tools/callgraph"
|
||||
"github.com/golangci/go-tools/lint/lintdsl"
|
||||
"github.com/golangci/go-tools/ssa"
|
||||
)
|
||||
|
||||
// IsStub reports whether a function is a stub. A function is
|
||||
|
@ -20,7 +20,7 @@ func (d *Descriptions) IsStub(fn *ssa.Function) bool {
|
|||
if len(fn.Blocks) > 1 {
|
||||
return false
|
||||
}
|
||||
instrs := lint.FilterDebug(fn.Blocks[0].Instrs)
|
||||
instrs := lintdsl.FilterDebug(fn.Blocks[0].Instrs)
|
||||
if len(instrs) != 1 {
|
||||
return false
|
||||
}
|
||||
|
|
vendor/github.com/golangci/go-tools/functions/terminates.go (2 lines changed, generated, vendored)
|
@ -1,6 +1,6 @@
|
|||
package functions
|
||||
|
||||
import "github.com/golangci/tools/go/ssa"
|
||||
import "github.com/golangci/go-tools/ssa"
|
||||
|
||||
// terminates reports whether fn is supposed to return, that is if it
|
||||
// has at least one theoretic path that returns from the function.
|
||||
|
|
vendor/github.com/golangci/go-tools/internal/sharedcheck/lint.go (108 lines changed, generated, vendored)
|
@ -5,66 +5,64 @@ import (
|
|||
"go/types"
|
||||
|
||||
"github.com/golangci/go-tools/lint"
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
. "github.com/golangci/go-tools/lint/lintdsl"
|
||||
"github.com/golangci/go-tools/ssa"
|
||||
)
|
||||
|
||||
func CheckRangeStringRunes(nodeFns map[ast.Node]*ssa.Function, j *lint.Job) {
|
||||
fn := func(node ast.Node) bool {
|
||||
rng, ok := node.(*ast.RangeStmt)
|
||||
if !ok || !lint.IsBlank(rng.Key) {
|
||||
return true
|
||||
}
|
||||
ssafn := nodeFns[rng]
|
||||
if ssafn == nil {
|
||||
return true
|
||||
}
|
||||
v, _ := ssafn.ValueForExpr(rng.X)
|
||||
func CheckRangeStringRunes(j *lint.Job) {
|
||||
for _, ssafn := range j.Program.InitialFunctions {
|
||||
fn := func(node ast.Node) bool {
|
||||
rng, ok := node.(*ast.RangeStmt)
|
||||
if !ok || !IsBlank(rng.Key) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Check that we're converting from string to []rune
|
||||
val, _ := v.(*ssa.Convert)
|
||||
if val == nil {
|
||||
return true
|
||||
}
|
||||
Tsrc, ok := val.X.Type().(*types.Basic)
|
||||
if !ok || Tsrc.Kind() != types.String {
|
||||
return true
|
||||
}
|
||||
Tdst, ok := val.Type().(*types.Slice)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
TdstElem, ok := Tdst.Elem().(*types.Basic)
|
||||
if !ok || TdstElem.Kind() != types.Int32 {
|
||||
return true
|
||||
}
|
||||
v, _ := ssafn.ValueForExpr(rng.X)
|
||||
|
||||
// Check that we're converting from string to []rune
|
||||
val, _ := v.(*ssa.Convert)
|
||||
if val == nil {
|
||||
return true
|
||||
}
|
||||
Tsrc, ok := val.X.Type().(*types.Basic)
|
||||
if !ok || Tsrc.Kind() != types.String {
|
||||
return true
|
||||
}
|
||||
Tdst, ok := val.Type().(*types.Slice)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
TdstElem, ok := Tdst.Elem().(*types.Basic)
|
||||
if !ok || TdstElem.Kind() != types.Int32 {
|
||||
return true
|
||||
}
|
||||
|
||||
// Check that the result of the conversion is only used to
|
||||
// range over
|
||||
refs := val.Referrers()
|
||||
if refs == nil {
|
||||
return true
|
||||
}
|
||||
|
||||
// Expect two refs: one for obtaining the length of the slice,
|
||||
// one for accessing the elements
|
||||
if len(FilterDebug(*refs)) != 2 {
|
||||
// TODO(dh): right now, we check that only one place
|
||||
// refers to our slice. This will miss cases such as
|
||||
// ranging over the slice twice. Ideally, we'd ensure that
|
||||
// the slice is only used for ranging over (without
|
||||
// accessing the key), but that is harder to do because in
|
||||
// SSA form, ranging over a slice looks like an ordinary
|
||||
// loop with index increments and slice accesses. We'd
|
||||
// have to look at the associated AST node to check that
|
||||
// it's a range statement.
|
||||
return true
|
||||
}
|
||||
|
||||
j.Errorf(rng, "should range over string, not []rune(string)")
|
||||
|
||||
// Check that the result of the conversion is only used to
|
||||
// range over
|
||||
refs := val.Referrers()
|
||||
if refs == nil {
|
||||
return true
|
||||
}
|
||||
|
||||
// Expect two refs: one for obtaining the length of the slice,
|
||||
// one for accessing the elements
|
||||
if len(lint.FilterDebug(*refs)) != 2 {
|
||||
// TODO(dh): right now, we check that only one place
|
||||
// refers to our slice. This will miss cases such as
|
||||
// ranging over the slice twice. Ideally, we'd ensure that
|
||||
// the slice is only used for ranging over (without
|
||||
// accessing the key), but that is harder to do because in
|
||||
// SSA form, ranging over a slice looks like an ordinary
|
||||
// loop with index increments and slice accesses. We'd
|
||||
// have to look at the associated AST node to check that
|
||||
// it's a range statement.
|
||||
return true
|
||||
}
|
||||
|
||||
j.Errorf(rng, "should range over string, not []rune(string)")
|
||||
|
||||
return true
|
||||
}
|
||||
for _, f := range j.Program.Files {
|
||||
ast.Inspect(f, fn)
|
||||
Inspect(ssafn.Syntax(), fn)
|
||||
}
|
||||
}
|
||||
|
|
vendor/github.com/golangci/go-tools/lint/generated.go (new file, 33 lines, generated, vendored)
|
@@ -0,0 +1,33 @@
package lint

import (
	"bufio"
	"bytes"
	"io"
)

var (
	prefix = []byte("// Code generated ")
	suffix = []byte(" DO NOT EDIT.")
	nl     = []byte("\n")
	crnl   = []byte("\r\n")
)

func isGenerated(r io.Reader) bool {
	br := bufio.NewReader(r)
	for {
		s, err := br.ReadBytes('\n')
		if err != nil && err != io.EOF {
			return false
		}
		s = bytes.TrimSuffix(s, crnl)
		s = bytes.TrimSuffix(s, nl)
		if bytes.HasPrefix(s, prefix) && bytes.HasSuffix(s, suffix) {
			return true
		}
		if err == io.EOF {
			break
		}
	}
	return false
}
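isGenerated implements the convention from https://golang.org/s/generatedcode: a file counts as generated if any line starts with "// Code generated " and ends with " DO NOT EDIT.". The helper itself is unexported, so the sketch below only mirrors its prefix/suffix test on a single line; hasGeneratedMarker is an illustrative name, not part of the vendored package:

```go
package main

import (
	"fmt"
	"strings"
)

// hasGeneratedMarker applies the same prefix/suffix test as isGenerated,
// but to one line of text instead of an io.Reader.
func hasGeneratedMarker(line string) bool {
	return strings.HasPrefix(line, "// Code generated ") &&
		strings.HasSuffix(line, " DO NOT EDIT.")
}

func main() {
	fmt.Println(hasGeneratedMarker("// Code generated by protoc-gen-go. DO NOT EDIT.")) // true
	fmt.Println(hasGeneratedMarker("// hand-written code"))                             // false
}
```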
vendor/github.com/golangci/go-tools/lint/lint.go (856 lines changed, generated, vendored; diff suppressed because it is too large)
vendor/github.com/golangci/go-tools/lint/lintdsl/lintdsl.go (new file, 342 lines, generated, vendored)
|
@ -0,0 +1,342 @@
|
|||
// Package lintdsl provides helpers for implementing static analysis
|
||||
// checks. Dot-importing this package is encouraged.
|
||||
package lintdsl
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/constant"
|
||||
"go/printer"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"strings"
|
||||
|
||||
"github.com/golangci/go-tools/lint"
|
||||
"github.com/golangci/go-tools/ssa"
|
||||
)
|
||||
|
||||
type packager interface {
|
||||
Package() *ssa.Package
|
||||
}
|
||||
|
||||
func CallName(call *ssa.CallCommon) string {
|
||||
if call.IsInvoke() {
|
||||
return ""
|
||||
}
|
||||
switch v := call.Value.(type) {
|
||||
case *ssa.Function:
|
||||
fn, ok := v.Object().(*types.Func)
|
||||
if !ok {
|
||||
return ""
|
||||
}
|
||||
return fn.FullName()
|
||||
case *ssa.Builtin:
|
||||
return v.Name()
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func IsCallTo(call *ssa.CallCommon, name string) bool { return CallName(call) == name }
|
||||
func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name }
|
||||
|
||||
func FilterDebug(instr []ssa.Instruction) []ssa.Instruction {
|
||||
var out []ssa.Instruction
|
||||
for _, ins := range instr {
|
||||
if _, ok := ins.(*ssa.DebugRef); !ok {
|
||||
out = append(out, ins)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func IsExample(fn *ssa.Function) bool {
|
||||
if !strings.HasPrefix(fn.Name(), "Example") {
|
||||
return false
|
||||
}
|
||||
f := fn.Prog.Fset.File(fn.Pos())
|
||||
if f == nil {
|
||||
return false
|
||||
}
|
||||
return strings.HasSuffix(f.Name(), "_test.go")
|
||||
}
|
||||
|
||||
func IsPointerLike(T types.Type) bool {
|
||||
switch T := T.Underlying().(type) {
|
||||
case *types.Interface, *types.Chan, *types.Map, *types.Pointer:
|
||||
return true
|
||||
case *types.Basic:
|
||||
return T.Kind() == types.UnsafePointer
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func IsGenerated(f *ast.File) bool {
|
||||
comments := f.Comments
|
||||
if len(comments) > 0 {
|
||||
comment := comments[0].Text()
|
||||
return strings.Contains(comment, "Code generated by") ||
|
||||
strings.Contains(comment, "DO NOT EDIT")
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func IsIdent(expr ast.Expr, ident string) bool {
|
||||
id, ok := expr.(*ast.Ident)
|
||||
return ok && id.Name == ident
|
||||
}
|
||||
|
||||
// isBlank returns whether id is the blank identifier "_".
|
||||
// If id == nil, the answer is false.
|
||||
func IsBlank(id ast.Expr) bool {
|
||||
ident, _ := id.(*ast.Ident)
|
||||
return ident != nil && ident.Name == "_"
|
||||
}
|
||||
|
||||
func IsIntLiteral(expr ast.Expr, literal string) bool {
|
||||
lit, ok := expr.(*ast.BasicLit)
|
||||
return ok && lit.Kind == token.INT && lit.Value == literal
|
||||
}
|
||||
|
||||
// Deprecated: use IsIntLiteral instead
|
||||
func IsZero(expr ast.Expr) bool {
|
||||
return IsIntLiteral(expr, "0")
|
||||
}
|
||||
|
||||
func TypeOf(j *lint.Job, expr ast.Expr) types.Type {
|
||||
if expr == nil {
|
||||
return nil
|
||||
}
|
||||
return j.NodePackage(expr).TypesInfo.TypeOf(expr)
|
||||
}
|
||||
|
||||
func IsOfType(j *lint.Job, expr ast.Expr, name string) bool { return IsType(TypeOf(j, expr), name) }
|
||||
|
||||
func ObjectOf(j *lint.Job, ident *ast.Ident) types.Object {
|
||||
if ident == nil {
|
||||
return nil
|
||||
}
|
||||
return j.NodePackage(ident).TypesInfo.ObjectOf(ident)
|
||||
}
|
||||
|
||||
func IsInTest(j *lint.Job, node lint.Positioner) bool {
|
||||
// FIXME(dh): this doesn't work for global variables with
|
||||
// initializers
|
||||
f := j.Program.SSA.Fset.File(node.Pos())
|
||||
return f != nil && strings.HasSuffix(f.Name(), "_test.go")
|
||||
}
|
||||
|
||||
func IsInMain(j *lint.Job, node lint.Positioner) bool {
|
||||
if node, ok := node.(packager); ok {
|
||||
return node.Package().Pkg.Name() == "main"
|
||||
}
|
||||
pkg := j.NodePackage(node)
|
||||
if pkg == nil {
|
||||
return false
|
||||
}
|
||||
return pkg.Types.Name() == "main"
|
||||
}
|
||||
|
||||
func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string {
|
||||
info := j.NodePackage(expr).TypesInfo
|
||||
sel := info.Selections[expr]
|
||||
if sel == nil {
|
||||
if x, ok := expr.X.(*ast.Ident); ok {
|
||||
pkg, ok := info.ObjectOf(x).(*types.PkgName)
|
||||
if !ok {
|
||||
// This shouldn't happen
|
||||
return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name)
|
||||
}
|
||||
return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name)
|
||||
}
|
||||
panic(fmt.Sprintf("unsupported selector: %v", expr))
|
||||
}
|
||||
return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name())
|
||||
}
|
||||
|
||||
func IsNil(j *lint.Job, expr ast.Expr) bool {
|
||||
return j.NodePackage(expr).TypesInfo.Types[expr].IsNil()
|
||||
}
|
||||
|
||||
func BoolConst(j *lint.Job, expr ast.Expr) bool {
|
||||
val := j.NodePackage(expr).TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val()
|
||||
return constant.BoolVal(val)
|
||||
}
|
||||
|
||||
func IsBoolConst(j *lint.Job, expr ast.Expr) bool {
|
||||
// We explicitly don't support typed bools because more often than
|
||||
// not, custom bool types are used as binary enums and the
|
||||
// explicit comparison is desired.
|
||||
|
||||
ident, ok := expr.(*ast.Ident)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
obj := j.NodePackage(expr).TypesInfo.ObjectOf(ident)
|
||||
c, ok := obj.(*types.Const)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
basic, ok := c.Type().(*types.Basic)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) {
|
||||
tv := j.NodePackage(expr).TypesInfo.Types[expr]
|
||||
if tv.Value == nil {
|
||||
return 0, false
|
||||
}
|
||||
if tv.Value.Kind() != constant.Int {
|
||||
return 0, false
|
||||
}
|
||||
return constant.Int64Val(tv.Value)
|
||||
}
|
||||
|
||||
func ExprToString(j *lint.Job, expr ast.Expr) (string, bool) {
|
||||
val := j.NodePackage(expr).TypesInfo.Types[expr].Value
|
||||
if val == nil {
|
||||
return "", false
|
||||
}
|
||||
if val.Kind() != constant.String {
|
||||
return "", false
|
||||
}
|
||||
return constant.StringVal(val), true
|
||||
}
|
||||
|
||||
// Dereference returns a pointer's element type; otherwise it returns
|
||||
// T.
|
||||
func Dereference(T types.Type) types.Type {
|
||||
if p, ok := T.Underlying().(*types.Pointer); ok {
|
||||
return p.Elem()
|
||||
}
|
||||
return T
|
||||
}
|
||||
|
||||
// DereferenceR returns a pointer's element type; otherwise it returns
|
||||
// T. If the element type is itself a pointer, DereferenceR will be
|
||||
// applied recursively.
|
||||
func DereferenceR(T types.Type) types.Type {
|
||||
if p, ok := T.Underlying().(*types.Pointer); ok {
|
||||
return DereferenceR(p.Elem())
|
||||
}
|
||||
return T
|
||||
}
|
||||
|
||||
func IsGoVersion(j *lint.Job, minor int) bool {
|
||||
return j.Program.GoVersion >= minor
|
||||
}
|
||||
|
||||
func CallNameAST(j *lint.Job, call *ast.CallExpr) string {
|
||||
switch fun := call.Fun.(type) {
|
||||
case *ast.SelectorExpr:
|
||||
fn, ok := ObjectOf(j, fun.Sel).(*types.Func)
|
||||
if !ok {
|
||||
return ""
|
||||
}
|
||||
return fn.FullName()
|
||||
case *ast.Ident:
|
||||
obj := ObjectOf(j, fun)
|
||||
switch obj := obj.(type) {
|
||||
case *types.Func:
|
||||
return obj.FullName()
|
||||
case *types.Builtin:
|
||||
return obj.Name()
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
func IsCallToAST(j *lint.Job, node ast.Node, name string) bool {
|
||||
call, ok := node.(*ast.CallExpr)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
return CallNameAST(j, call) == name
|
||||
}
|
||||
|
||||
func IsCallToAnyAST(j *lint.Job, node ast.Node, names ...string) bool {
|
||||
for _, name := range names {
|
||||
if IsCallToAST(j, node, name) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func Render(j *lint.Job, x interface{}) string {
|
||||
fset := j.Program.SSA.Fset
|
||||
var buf bytes.Buffer
|
||||
if err := printer.Fprint(&buf, fset, x); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func RenderArgs(j *lint.Job, args []ast.Expr) string {
|
||||
var ss []string
|
||||
for _, arg := range args {
|
||||
ss = append(ss, Render(j, arg))
|
||||
}
|
||||
return strings.Join(ss, ", ")
|
||||
}
|
||||
|
||||
func Preamble(f *ast.File) string {
|
||||
cutoff := f.Package
|
||||
if f.Doc != nil {
|
||||
cutoff = f.Doc.Pos()
|
||||
}
|
||||
var out []string
|
||||
for _, cmt := range f.Comments {
|
||||
if cmt.Pos() >= cutoff {
|
||||
break
|
||||
}
|
||||
out = append(out, cmt.Text())
|
||||
}
|
||||
return strings.Join(out, "\n")
|
||||
}
|
||||
|
||||
func Inspect(node ast.Node, fn func(node ast.Node) bool) {
|
||||
if node == nil {
|
||||
return
|
||||
}
|
||||
ast.Inspect(node, fn)
|
||||
}
|
||||
|
||||
func GroupSpecs(j *lint.Job, specs []ast.Spec) [][]ast.Spec {
|
||||
if len(specs) == 0 {
|
||||
return nil
|
||||
}
|
||||
fset := j.Program.SSA.Fset
|
||||
groups := make([][]ast.Spec, 1)
|
||||
groups[0] = append(groups[0], specs[0])
|
||||
|
||||
for _, spec := range specs[1:] {
|
||||
g := groups[len(groups)-1]
|
||||
if fset.PositionFor(spec.Pos(), false).Line-1 !=
|
||||
fset.PositionFor(g[len(g)-1].End(), false).Line {
|
||||
|
||||
groups = append(groups, nil)
|
||||
}
|
||||
|
||||
groups[len(groups)-1] = append(groups[len(groups)-1], spec)
|
||||
}
|
||||
|
||||
return groups
|
||||
}
|
||||
|
||||
func IsObject(obj types.Object, name string) bool {
|
||||
var path string
|
||||
if pkg := obj.Pkg(); pkg != nil {
|
||||
path = pkg.Path() + "."
|
||||
}
|
||||
return path+obj.Name() == name
|
||||
}
|
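The lintdsl package comment encourages dot-importing, and that is how the rewritten checks in this commit (sharedcheck above, staticcheck below) call helpers such as IsBlank, FilterDebug and Inspect without a qualifier. Below is a skeleton of a check written that way; the check itself is made up, and it assumes lint.Program exposes a Files slice as in the sharedcheck code earlier in this diff:

```go
package mycheck

import (
	"go/ast"

	"github.com/golangci/go-tools/lint"
	// Dot import, as the lintdsl package documentation suggests.
	. "github.com/golangci/go-tools/lint/lintdsl"
)

// CheckBlankIdents is an illustrative skeleton, not part of this commit.
func CheckBlankIdents(j *lint.Job) {
	for _, f := range j.Program.Files {
		Inspect(f, func(node ast.Node) bool {
			// IsBlank comes from lintdsl via the dot import.
			if id, ok := node.(*ast.Ident); ok && IsBlank(id) {
				j.Errorf(id, "found a blank identifier")
			}
			return true
		})
	}
}
```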
vendor/github.com/golangci/go-tools/lint/lintutil/format/format.go (new file, 128 lines, generated, vendored)
|
@ -0,0 +1,128 @@
|
|||
// Package format provides formatters for linter problems.
|
||||
package format
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"go/token"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"text/tabwriter"
|
||||
|
||||
"github.com/golangci/go-tools/lint"
|
||||
)
|
||||
|
||||
func shortPath(path string) string {
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return path
|
||||
}
|
||||
if rel, err := filepath.Rel(cwd, path); err == nil && len(rel) < len(path) {
|
||||
return rel
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
func relativePositionString(pos token.Position) string {
|
||||
s := shortPath(pos.Filename)
|
||||
if pos.IsValid() {
|
||||
if s != "" {
|
||||
s += ":"
|
||||
}
|
||||
s += fmt.Sprintf("%d:%d", pos.Line, pos.Column)
|
||||
}
|
||||
if s == "" {
|
||||
s = "-"
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
type Statter interface {
|
||||
Stats(total, errors, warnings int)
|
||||
}
|
||||
|
||||
type Formatter interface {
|
||||
Format(p lint.Problem)
|
||||
}
|
||||
|
||||
type Text struct {
|
||||
W io.Writer
|
||||
}
|
||||
|
||||
func (o Text) Format(p lint.Problem) {
|
||||
fmt.Fprintf(o.W, "%v: %s\n", relativePositionString(p.Position), p.String())
|
||||
}
|
||||
|
||||
type JSON struct {
|
||||
W io.Writer
|
||||
}
|
||||
|
||||
func severity(s lint.Severity) string {
|
||||
switch s {
|
||||
case lint.Error:
|
||||
return "error"
|
||||
case lint.Warning:
|
||||
return "warning"
|
||||
case lint.Ignored:
|
||||
return "ignored"
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (o JSON) Format(p lint.Problem) {
|
||||
type location struct {
|
||||
File string `json:"file"`
|
||||
Line int `json:"line"`
|
||||
Column int `json:"column"`
|
||||
}
|
||||
jp := struct {
|
||||
Code string `json:"code"`
|
||||
Severity string `json:"severity,omitempty"`
|
||||
Location location `json:"location"`
|
||||
Message string `json:"message"`
|
||||
}{
|
||||
Code: p.Check,
|
||||
Severity: severity(p.Severity),
|
||||
Location: location{
|
||||
File: p.Position.Filename,
|
||||
Line: p.Position.Line,
|
||||
Column: p.Position.Column,
|
||||
},
|
||||
Message: p.Text,
|
||||
}
|
||||
_ = json.NewEncoder(o.W).Encode(jp)
|
||||
}
|
||||
|
||||
type Stylish struct {
|
||||
W io.Writer
|
||||
|
||||
prevFile string
|
||||
tw *tabwriter.Writer
|
||||
}
|
||||
|
||||
func (o *Stylish) Format(p lint.Problem) {
|
||||
if p.Position.Filename == "" {
|
||||
p.Position.Filename = "-"
|
||||
}
|
||||
|
||||
if p.Position.Filename != o.prevFile {
|
||||
if o.prevFile != "" {
|
||||
o.tw.Flush()
|
||||
fmt.Fprintln(o.W)
|
||||
}
|
||||
fmt.Fprintln(o.W, p.Position.Filename)
|
||||
o.prevFile = p.Position.Filename
|
||||
o.tw = tabwriter.NewWriter(o.W, 0, 4, 2, ' ', 0)
|
||||
}
|
||||
fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", p.Position.Line, p.Position.Column, p.Check, p.Text)
|
||||
}
|
||||
|
||||
func (o *Stylish) Stats(total, errors, warnings int) {
|
||||
if o.tw != nil {
|
||||
o.tw.Flush()
|
||||
fmt.Fprintln(o.W)
|
||||
}
|
||||
fmt.Fprintf(o.W, " ✖ %d problems (%d errors, %d warnings)\n",
|
||||
total, errors, warnings)
|
||||
}
|
vendor/github.com/golangci/go-tools/lint/lintutil/util.go (349 lines changed, generated, vendored)
|
@ -8,68 +8,28 @@
|
|||
package lintutil // import "github.com/golangci/go-tools/lint/lintutil"
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/build"
|
||||
"go/token"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"runtime/pprof"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/golangci/go-tools/config"
|
||||
"github.com/golangci/go-tools/lint"
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
"github.com/golangci/go-tools/lint/lintutil/format"
|
||||
"github.com/golangci/go-tools/version"
|
||||
|
||||
"golang.org/x/tools/go/loader"
|
||||
"golang.org/x/tools/go/packages"
|
||||
)
|
||||
|
||||
type OutputFormatter interface {
|
||||
Format(p lint.Problem)
|
||||
}
|
||||
|
||||
type TextOutput struct {
|
||||
w io.Writer
|
||||
}
|
||||
|
||||
func (o TextOutput) Format(p lint.Problem) {
|
||||
fmt.Fprintf(o.w, "%v: %s\n", relativePositionString(p.Position), p.String())
|
||||
}
|
||||
|
||||
type JSONOutput struct {
|
||||
w io.Writer
|
||||
}
|
||||
|
||||
func (o JSONOutput) Format(p lint.Problem) {
|
||||
type location struct {
|
||||
File string `json:"file"`
|
||||
Line int `json:"line"`
|
||||
Column int `json:"column"`
|
||||
}
|
||||
jp := struct {
|
||||
Checker string `json:"checker"`
|
||||
Code string `json:"code"`
|
||||
Severity string `json:"severity,omitempty"`
|
||||
Location location `json:"location"`
|
||||
Message string `json:"message"`
|
||||
Ignored bool `json:"ignored"`
|
||||
}{
|
||||
p.Checker,
|
||||
p.Check,
|
||||
"", // TODO(dh): support severity
|
||||
location{
|
||||
p.Position.Filename,
|
||||
p.Position.Line,
|
||||
p.Position.Column,
|
||||
},
|
||||
p.Text,
|
||||
p.Ignored,
|
||||
}
|
||||
_ = json.NewEncoder(o.w).Encode(jp)
|
||||
}
|
||||
func usage(name string, flags *flag.FlagSet) func() {
|
||||
return func() {
|
||||
fmt.Fprintf(os.Stderr, "Usage of %s:\n", name)
|
||||
|
@ -82,38 +42,6 @@ func usage(name string, flags *flag.FlagSet) func() {
|
|||
}
|
||||
}
|
||||
|
||||
type runner struct {
|
||||
checker lint.Checker
|
||||
tags []string
|
||||
ignores []lint.Ignore
|
||||
version int
|
||||
returnIgnored bool
|
||||
}
|
||||
|
||||
func resolveRelative(importPaths []string, tags []string) (goFiles bool, err error) {
|
||||
if len(importPaths) == 0 {
|
||||
return false, nil
|
||||
}
|
||||
if strings.HasSuffix(importPaths[0], ".go") {
|
||||
// User is specifying a package in terms of .go files, don't resolve
|
||||
return true, nil
|
||||
}
|
||||
wd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
ctx := build.Default
|
||||
ctx.BuildTags = tags
|
||||
for i, path := range importPaths {
|
||||
bpkg, err := ctx.Import(path, wd, build.FindOnly)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("can't load package %q: %v", path, err)
|
||||
}
|
||||
importPaths[i] = bpkg.ImportPath
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func parseIgnore(s string) ([]lint.Ignore, error) {
|
||||
var out []lint.Ignore
|
||||
if len(s) == 0 {
|
||||
|
@ -156,16 +84,41 @@ func (v *versionFlag) Get() interface{} {
|
|||
return int(*v)
|
||||
}
|
||||
|
||||
type list []string
|
||||
|
||||
func (list *list) String() string {
|
||||
return `"` + strings.Join(*list, ",") + `"`
|
||||
}
|
||||
|
||||
func (list *list) Set(s string) error {
|
||||
if s == "" {
|
||||
*list = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
*list = strings.Split(s, ",")
|
||||
return nil
|
||||
}
|
||||
|
||||
func FlagSet(name string) *flag.FlagSet {
|
||||
flags := flag.NewFlagSet("", flag.ExitOnError)
|
||||
flags.Usage = usage(name, flags)
|
||||
flags.Float64("min_confidence", 0, "Deprecated; use -ignore instead")
|
||||
flags.String("tags", "", "List of `build tags`")
|
||||
flags.String("ignore", "", "Space separated list of checks to ignore, in the following format: 'import/path/file.go:Check1,Check2,...' Both the import path and file name sections support globbing, e.g. 'os/exec/*_test.go'")
|
||||
flags.String("ignore", "", "Deprecated: use linter directives instead")
|
||||
flags.Bool("tests", true, "Include tests")
|
||||
flags.Bool("version", false, "Print version and exit")
|
||||
flags.Bool("show-ignored", false, "Don't filter ignored problems")
|
||||
flags.String("f", "text", "Output `format` (valid choices are 'text' and 'json')")
|
||||
flags.String("f", "text", "Output `format` (valid choices are 'stylish', 'text' and 'json')")
|
||||
|
||||
flags.Int("debug.max-concurrent-jobs", 0, "Number of jobs to run concurrently")
|
||||
flags.Bool("debug.print-stats", false, "Print debug statistics")
|
||||
flags.String("debug.cpuprofile", "", "Write CPU profile to `file`")
|
||||
flags.String("debug.memprofile", "", "Write memory profile to `file`")
|
||||
|
||||
checks := list{"inherit"}
|
||||
fail := list{"all"}
|
||||
flags.Var(&checks, "checks", "Comma-separated list of `checks` to enable.")
|
||||
flags.Var(&fail, "fail", "Comma-separated list of `checks` that can cause a non-zero exit status.")
|
||||
|
||||
tags := build.Default.ReleaseTags
|
||||
v := tags[len(tags)-1][2:]
|
||||
|
@ -178,56 +131,129 @@ func FlagSet(name string) *flag.FlagSet {
|
|||
return flags
|
||||
}
|
||||
|
||||
type CheckerConfig struct {
|
||||
Checker lint.Checker
|
||||
ExitNonZero bool
|
||||
}
|
||||
|
||||
func ProcessFlagSet(confs []CheckerConfig, fs *flag.FlagSet, lprog *loader.Program, ssaProg *ssa.Program, conf *loader.Config) []lint.Problem {
|
||||
func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) {
|
||||
tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string)
|
||||
ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string)
|
||||
tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool)
|
||||
goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int)
|
||||
formatter := fs.Lookup("f").Value.(flag.Getter).Get().(string)
|
||||
printVersion := fs.Lookup("version").Value.(flag.Getter).Get().(bool)
|
||||
showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool)
|
||||
|
||||
if printVersion {
|
||||
version.Print()
|
||||
os.Exit(0)
|
||||
maxConcurrentJobs := fs.Lookup("debug.max-concurrent-jobs").Value.(flag.Getter).Get().(int)
|
||||
printStats := fs.Lookup("debug.print-stats").Value.(flag.Getter).Get().(bool)
|
||||
cpuProfile := fs.Lookup("debug.cpuprofile").Value.(flag.Getter).Get().(string)
|
||||
memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string)
|
||||
|
||||
cfg := config.Config{}
|
||||
cfg.Checks = *fs.Lookup("checks").Value.(*list)
|
||||
|
||||
exit := func(code int) {
|
||||
if cpuProfile != "" {
|
||||
pprof.StopCPUProfile()
|
||||
}
|
||||
if memProfile != "" {
|
||||
f, err := os.Create(memProfile)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
runtime.GC()
|
||||
pprof.WriteHeapProfile(f)
|
||||
}
|
||||
os.Exit(code)
|
||||
}
|
||||
if cpuProfile != "" {
|
||||
f, err := os.Create(cpuProfile)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
pprof.StartCPUProfile(f)
|
||||
}
|
||||
|
||||
var cs []lint.Checker
|
||||
for _, conf := range confs {
|
||||
cs = append(cs, conf.Checker)
|
||||
if printVersion {
|
||||
version.Print()
|
||||
exit(0)
|
||||
}
|
||||
pss, err := Lint(cs, lprog, ssaProg, conf, &Options{
|
||||
|
||||
ps, err := Lint(cs, fs.Args(), &Options{
|
||||
Tags: strings.Fields(tags),
|
||||
LintTests: tests,
|
||||
Ignores: ignore,
|
||||
GoVersion: goVersion,
|
||||
ReturnIgnored: showIgnored,
|
||||
Config: cfg,
|
||||
|
||||
MaxConcurrentJobs: maxConcurrentJobs,
|
||||
PrintStats: printStats,
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
exit(1)
|
||||
}
|
||||
|
||||
var ps []lint.Problem
|
||||
for _, p := range pss {
|
||||
ps = append(ps, p...)
|
||||
var f format.Formatter
|
||||
switch formatter {
|
||||
case "text":
|
||||
f = format.Text{W: os.Stdout}
|
||||
case "stylish":
|
||||
f = &format.Stylish{W: os.Stdout}
|
||||
case "json":
|
||||
f = format.JSON{W: os.Stdout}
|
||||
default:
|
||||
fmt.Fprintf(os.Stderr, "unsupported output format %q\n", formatter)
|
||||
exit(2)
|
||||
}
|
||||
|
||||
return ps
|
||||
var (
|
||||
total int
|
||||
errors int
|
||||
warnings int
|
||||
)
|
||||
|
||||
fail := *fs.Lookup("fail").Value.(*list)
|
||||
var allChecks []string
|
||||
for _, p := range ps {
|
||||
allChecks = append(allChecks, p.Check)
|
||||
}
|
||||
|
||||
shouldExit := lint.FilterChecks(allChecks, fail)
|
||||
|
||||
total = len(ps)
|
||||
for _, p := range ps {
|
||||
if shouldExit[p.Check] {
|
||||
errors++
|
||||
} else {
|
||||
p.Severity = lint.Warning
|
||||
warnings++
|
||||
}
|
||||
f.Format(p)
|
||||
}
|
||||
if f, ok := f.(format.Statter); ok {
|
||||
f.Stats(total, errors, warnings)
|
||||
}
|
||||
if errors > 0 {
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
type Options struct {
|
||||
Config config.Config
|
||||
|
||||
Tags []string
|
||||
LintTests bool
|
||||
Ignores string
|
||||
GoVersion int
|
||||
ReturnIgnored bool
|
||||
|
||||
MaxConcurrentJobs int
|
||||
PrintStats bool
|
||||
}
|
||||
|
||||
func Lint(cs []lint.Checker, lprog *loader.Program, ssaProg *ssa.Program, conf *loader.Config, opt *Options) ([][]lint.Problem, error) {
|
||||
func Lint(cs []lint.Checker, paths []string, opt *Options) ([]lint.Problem, error) {
|
||||
stats := lint.PerfStats{
|
||||
CheckerInits: map[string]time.Duration{},
|
||||
}
|
||||
|
||||
if opt == nil {
|
||||
opt = &Options{}
|
||||
}
|
||||
|
@ -236,58 +262,101 @@ func Lint(cs []lint.Checker, lprog *loader.Program, ssaProg *ssa.Program, conf *
|
|||
return nil, err
|
||||
}
|
||||
|
||||
var problems [][]lint.Problem
|
||||
for _, c := range cs {
|
||||
runner := &runner{
|
||||
checker: c,
|
||||
tags: opt.Tags,
|
||||
ignores: ignores,
|
||||
version: opt.GoVersion,
|
||||
returnIgnored: opt.ReturnIgnored,
|
||||
}
|
||||
problems = append(problems, runner.lint(lprog, ssaProg, conf))
|
||||
conf := &packages.Config{
|
||||
Mode: packages.LoadAllSyntax,
|
||||
Tests: opt.LintTests,
|
||||
BuildFlags: []string{
|
||||
"-tags=" + strings.Join(opt.Tags, " "),
|
||||
},
|
||||
}
|
||||
|
||||
t := time.Now()
|
||||
if len(paths) == 0 {
|
||||
paths = []string{"."}
|
||||
}
|
||||
pkgs, err := packages.Load(conf, paths...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
stats.PackageLoading = time.Since(t)
|
||||
|
||||
var problems []lint.Problem
|
||||
workingPkgs := make([]*packages.Package, 0, len(pkgs))
|
||||
for _, pkg := range pkgs {
|
||||
if pkg.IllTyped {
|
||||
problems = append(problems, compileErrors(pkg)...)
|
||||
} else {
|
||||
workingPkgs = append(workingPkgs, pkg)
|
||||
}
|
||||
}
|
||||
|
||||
if len(workingPkgs) == 0 {
|
||||
return problems, nil
|
||||
}
|
||||
|
||||
l := &lint.Linter{
|
||||
Checkers: cs,
|
||||
Ignores: ignores,
|
||||
GoVersion: opt.GoVersion,
|
||||
ReturnIgnored: opt.ReturnIgnored,
|
||||
Config: opt.Config,
|
||||
|
||||
MaxConcurrentJobs: opt.MaxConcurrentJobs,
|
||||
PrintStats: opt.PrintStats,
|
||||
}
|
||||
problems = append(problems, l.Lint(workingPkgs, &stats)...)
|
||||
|
||||
return problems, nil
|
||||
}
|
||||
|
||||
func shortPath(path string) string {
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return path
|
||||
var posRe = regexp.MustCompile(`^(.+?):(\d+):(\d+)?$`)
|
||||
|
||||
func parsePos(pos string) token.Position {
|
||||
if pos == "-" || pos == "" {
|
||||
return token.Position{}
|
||||
}
|
||||
if rel, err := filepath.Rel(cwd, path); err == nil && len(rel) < len(path) {
|
||||
return rel
|
||||
parts := posRe.FindStringSubmatch(pos)
|
||||
if parts == nil {
|
||||
panic(fmt.Sprintf("internal error: malformed position %q", pos))
|
||||
}
|
||||
file := parts[1]
|
||||
line, _ := strconv.Atoi(parts[2])
|
||||
col, _ := strconv.Atoi(parts[3])
|
||||
return token.Position{
|
||||
Filename: file,
|
||||
Line: line,
|
||||
Column: col,
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
func relativePositionString(pos token.Position) string {
|
||||
s := shortPath(pos.Filename)
|
||||
if pos.IsValid() {
|
||||
if s != "" {
|
||||
s += ":"
|
||||
func compileErrors(pkg *packages.Package) []lint.Problem {
|
||||
if !pkg.IllTyped {
|
||||
return nil
|
||||
}
|
||||
if len(pkg.Errors) == 0 {
|
||||
// transitively ill-typed
|
||||
var ps []lint.Problem
|
||||
for _, imp := range pkg.Imports {
|
||||
ps = append(ps, compileErrors(imp)...)
|
||||
}
|
||||
s += fmt.Sprintf("%d:%d", pos.Line, pos.Column)
|
||||
return ps
|
||||
}
|
||||
if s == "" {
|
||||
s = "-"
|
||||
var ps []lint.Problem
|
||||
for _, err := range pkg.Errors {
|
||||
p := lint.Problem{
|
||||
Position: parsePos(err.Pos),
|
||||
Text: err.Msg,
|
||||
Checker: "compiler",
|
||||
Check: "compile",
|
||||
}
|
||||
ps = append(ps, p)
|
||||
}
|
||||
return s
|
||||
return ps
|
||||
}
|
||||
|
||||
func ProcessArgs(name string, cs []CheckerConfig, args []string) {
|
||||
func ProcessArgs(name string, cs []lint.Checker, args []string) {
|
||||
flags := FlagSet(name)
|
||||
flags.Parse(args)
|
||||
|
||||
ProcessFlagSet(cs, flags, nil, nil, nil)
|
||||
}
|
||||
|
||||
func (runner *runner) lint(lprog *loader.Program, ssaProg *ssa.Program, conf *loader.Config) []lint.Problem {
|
||||
l := &lint.Linter{
|
||||
Checker: runner.checker,
|
||||
Ignores: runner.ignores,
|
||||
GoVersion: runner.version,
|
||||
ReturnIgnored: runner.returnIgnored,
|
||||
}
|
||||
return l.Lint(lprog, ssaProg, conf)
|
||||
ProcessFlagSet(cs, flags)
|
||||
}
|
||||
|
|
vendor/github.com/golangci/go-tools/simple/lint.go (767 lines changed, generated, vendored; diff suppressed because it is too large)
vendor/github.com/golangci/go-tools/simple/lint17.go (7 lines changed, generated, vendored)
|
@ -1,7 +0,0 @@
|
|||
// +build !go1.8
|
||||
|
||||
package simple
|
||||
|
||||
import "go/types"
|
||||
|
||||
var structsIdentical = types.Identical
|
vendor/github.com/golangci/go-tools/simple/lint18.go (7 lines changed, generated, vendored)
|
@ -1,7 +0,0 @@
|
|||
// +build go1.8
|
||||
|
||||
package simple
|
||||
|
||||
import "go/types"
|
||||
|
||||
var structsIdentical = types.IdenticalIgnoreTags
|
|
@ -1288,7 +1288,7 @@ func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
|
|||
// instead of BinOp(EQL, tag, b.expr(cond))
|
||||
// followed by If. Don't forget conversions
|
||||
// though.
|
||||
cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos)
|
||||
cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), cond.Pos())
|
||||
emitIf(fn, cond, body, nextCond)
|
||||
fn.currentBlock = nextCond
|
||||
}
|
|
@ -115,7 +115,7 @@ func (c *Const) IsNil() bool {
|
|||
return c.Value == nil
|
||||
}
|
||||
|
||||
// TODO(adonovan): move everything below into github.com/golangci/tools/go/ssa/interp.
|
||||
// TODO(adonovan): move everything below into github.com/golangci/go-tools/ssa/interp.
|
||||
|
||||
// Int64 returns the numeric value of this constant truncated to fit
|
||||
// a signed 64-bit integer.
|
|
@ -120,4 +120,4 @@
|
|||
// domains of source locations, ast.Nodes, types.Objects,
|
||||
// ssa.Values/Instructions.
|
||||
//
|
||||
package ssa // import "github.com/golangci/tools/go/ssa"
|
||||
package ssa // import "github.com/golangci/go-tools/ssa"
|
|
@ -265,6 +265,10 @@ func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.Func
|
|||
}
|
||||
}
|
||||
|
||||
type setNumable interface {
|
||||
setNum(int)
|
||||
}
|
||||
|
||||
// numberRegisters assigns numbers to all SSA registers
|
||||
// (value-defining Instructions) in f, to aid debugging.
|
||||
// (Non-Instruction Values are named at construction.)
|
||||
|
@ -275,9 +279,7 @@ func numberRegisters(f *Function) {
|
|||
for _, instr := range b.Instrs {
|
||||
switch instr.(type) {
|
||||
case Value:
|
||||
instr.(interface {
|
||||
setNum(int)
|
||||
}).setNum(v)
|
||||
instr.(setNumable).setNum(v)
|
||||
v++
|
||||
}
|
||||
}
|
|
@ -11,9 +11,9 @@ import (
|
|||
"go/token"
|
||||
"go/types"
|
||||
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
"golang.org/x/tools/go/loader"
|
||||
"golang.org/x/tools/go/packages"
|
||||
"github.com/golangci/go-tools/ssa"
|
||||
)
|
||||
|
||||
// Packages creates an SSA program for a set of packages loaded from
|
|
@ -24,7 +24,7 @@ import (
|
|||
"go/token"
|
||||
"go/types"
|
||||
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
"github.com/golangci/go-tools/ssa"
|
||||
)
|
||||
|
||||
// A ConstCase represents a single constant comparison.
|
|
@ -2,9 +2,9 @@
|
|||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ssautil // import "github.com/golangci/tools/go/ssa/ssautil"
|
||||
package ssautil // import "github.com/golangci/go-tools/ssa/ssautil"
|
||||
|
||||
import "github.com/golangci/tools/go/ssa"
|
||||
import "github.com/golangci/go-tools/ssa"
|
||||
|
||||
// This file defines utilities for visiting the SSA representation of
|
||||
// a Program.
|
|
@ -21,6 +21,7 @@ package ssa
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"go/types"
|
||||
)
|
||||
|
vendor/github.com/golangci/go-tools/ssautil/ssautil.go (new file, 41 lines, generated, vendored)
|
@@ -0,0 +1,41 @@
package ssautil

import (
	"github.com/golangci/go-tools/ssa"
)

func Reachable(from, to *ssa.BasicBlock) bool {
	if from == to {
		return true
	}
	if from.Dominates(to) {
		return true
	}

	found := false
	Walk(from, func(b *ssa.BasicBlock) bool {
		if b == to {
			found = true
			return false
		}
		return true
	})
	return found
}

func Walk(b *ssa.BasicBlock, fn func(*ssa.BasicBlock) bool) {
	seen := map[*ssa.BasicBlock]bool{}
	wl := []*ssa.BasicBlock{b}
	for len(wl) > 0 {
		b := wl[len(wl)-1]
		wl = wl[:len(wl)-1]
		if seen[b] {
			continue
		}
		seen[b] = true
		if !fn(b) {
			continue
		}
		wl = append(wl, b.Succs...)
	}
}
vendor/github.com/golangci/go-tools/staticcheck/buildtag.go (4 lines changed, generated, vendored)
|
@ -4,12 +4,12 @@ import (
|
|||
"go/ast"
|
||||
"strings"
|
||||
|
||||
"github.com/golangci/go-tools/lint"
|
||||
. "github.com/golangci/go-tools/lint/lintdsl"
|
||||
)
|
||||
|
||||
func buildTags(f *ast.File) [][]string {
|
||||
var out [][]string
|
||||
for _, line := range strings.Split(lint.Preamble(f), "\n") {
|
||||
for _, line := range strings.Split(Preamble(f), "\n") {
|
||||
if !strings.HasPrefix(line, "+build ") {
|
||||
continue
|
||||
}
|
||||
|
|
vendor/github.com/golangci/go-tools/staticcheck/lint.go (1242 lines changed, generated, vendored; diff suppressed because it is too large)
vendor/github.com/golangci/go-tools/staticcheck/rules.go (7 lines changed, generated, vendored)
|
@ -14,7 +14,8 @@ import (
|
|||
"unicode/utf8"
|
||||
|
||||
"github.com/golangci/go-tools/lint"
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
. "github.com/golangci/go-tools/lint/lintdsl"
|
||||
"github.com/golangci/go-tools/ssa"
|
||||
"github.com/golangci/go-tools/staticcheck/vrp"
|
||||
)
|
||||
|
||||
|
@ -193,7 +194,7 @@ func validEncodingBinaryType(j *lint.Job, typ types.Type) bool {
|
|||
types.Float32, types.Float64, types.Complex64, types.Complex128, types.Invalid:
|
||||
return true
|
||||
case types.Bool:
|
||||
return j.IsGoVersion(8)
|
||||
return IsGoVersion(j, 8)
|
||||
}
|
||||
return false
|
||||
case *types.Struct:
|
||||
|
@ -294,7 +295,7 @@ func ValidHostPort(v Value) bool {
|
|||
// ConvertedFrom reports whether value v was converted from type typ.
|
||||
func ConvertedFrom(v Value, typ string) bool {
|
||||
change, ok := v.Value.(*ssa.ChangeType)
|
||||
return ok && types.TypeString(change.X.Type(), nil) == typ
|
||||
return ok && IsType(change.X.Type(), typ)
|
||||
}
|
||||
|
||||
func UniqueStringCutset(v Value) bool {
|
||||
|
|
vendor/github.com/golangci/go-tools/staticcheck/vrp/channel.go (6 lines changed, generated, vendored)
|
@ -3,7 +3,7 @@ package vrp
|
|||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/golangci/tools/go/ssa"
|
||||
"github.com/golangci/go-tools/ssa"
|
||||
)
|
||||
|
||||
type ChannelInterval struct {
|
||||
|
@ -54,10 +54,10 @@ func (c *MakeChannelConstraint) Operands() []ssa.Value { return []ssa.Valu
|
|||
func (c *ChannelChangeTypeConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
|
||||
|
||||
func (c *MakeChannelConstraint) String() string {
|
||||
return fmt.Sprintf("%s = make(chan, %s)", c.Y().Name, c.Buffer.Name())
|
||||
return fmt.Sprintf("%s = make(chan, %s)", c.Y().Name(), c.Buffer.Name())
|
||||
}
|
||||
func (c *ChannelChangeTypeConstraint) String() string {
|
||||
return fmt.Sprintf("%s = changetype(%s)", c.Y().Name, c.X.Name())
|
||||
return fmt.Sprintf("%s = changetype(%s)", c.Y().Name(), c.X.Name())
|
||||
}
|
||||
|
||||
func (c *MakeChannelConstraint) Eval(g *Graph) Range {
|
||||
|
|
Some files were not shown because too many files have changed in this diff.