Make analysis incremental

Cache linting results. Reanalyze only changed packages
and the tree of packages that depend on them.

Fixes , fixes 
This commit is contained in:
Isaev Denis 2019-10-13 17:40:51 +03:00 committed by GitHub
parent ca6effbeec
commit 9ba730e989
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
59 changed files with 933 additions and 422 deletions

View file

@ -43,6 +43,8 @@ linters-settings:
disabled-checks:
- wrapperFunc
- dupImport # https://github.com/go-critic/go-critic/issues/845
- ifElseChain
- octalLiteral
funlen:
lines: 100
statements: 50
@ -95,7 +97,17 @@ linters:
run:
skip-dirs:
- test/testdata_etc
- internal/(cache|renameio|robustio)
skip-files:
- internal/cache/.*_test.go
issues:
exclude-rules:
- path: internal/(cache|renameio)/
linters:
- lll
- gochecknoinits
- gocyclo
- funlen
# golangci.com configuration
# https://github.com/golangci/golangci/wiki/Configuration

View file

@ -945,6 +945,8 @@ linters-settings:
disabled-checks:
- wrapperFunc
- dupImport # https://github.com/go-critic/go-critic/issues/845
- ifElseChain
- octalLiteral
funlen:
lines: 100
statements: 50
@ -997,7 +999,17 @@ linters:
run:
skip-dirs:
- test/testdata_etc
- internal/(cache|renameio|robustio)
skip-files:
- internal/cache/.*_test.go
issues:
exclude-rules:
- path: internal/(cache|renameio)/
linters:
- lll
- gochecknoinits
- gocyclo
- funlen
# golangci.com configuration
# https://github.com/golangci/golangci/wiki/Configuration

View file

@ -12,7 +12,6 @@ import (
"bytes"
"crypto/sha256"
"encoding/hex"
"errors"
"fmt"
"io"
"io/ioutil"
@ -22,6 +21,8 @@ import (
"strings"
"time"
"github.com/pkg/errors"
"github.com/golangci/golangci-lint/internal/renameio"
)
@ -144,28 +145,35 @@ func (c *Cache) get(id ActionID) (Entry, error) {
missing := func() (Entry, error) {
return Entry{}, errMissing
}
f, err := os.Open(c.fileName(id, "a"))
failed := func(err error) (Entry, error) {
return Entry{}, err
}
fileName := c.fileName(id, "a")
f, err := os.Open(fileName)
if err != nil {
return missing()
if os.IsNotExist(err) {
return missing()
}
return failed(err)
}
defer f.Close()
entry := make([]byte, entrySize+1) // +1 to detect whether f is too long
if n, err := io.ReadFull(f, entry); n != entrySize || err != io.ErrUnexpectedEOF {
return missing()
if n, readErr := io.ReadFull(f, entry); n != entrySize || readErr != io.ErrUnexpectedEOF {
return failed(fmt.Errorf("read %d/%d bytes from %s with error %s", n, entrySize, fileName, readErr))
}
if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' {
return missing()
return failed(fmt.Errorf("bad data in %s", fileName))
}
eid, entry := entry[3:3+hexSize], entry[3+hexSize:]
eout, entry := entry[1:1+hexSize], entry[1+hexSize:]
esize, entry := entry[1:1+20], entry[1+20:]
etime, entry := entry[1:1+20], entry[1+20:]
etime := entry[1 : 1+20]
var buf [HashSize]byte
if _, err := hex.Decode(buf[:], eid); err != nil || buf != id {
return missing()
if _, err = hex.Decode(buf[:], eid); err != nil || buf != id {
return failed(errors.Wrapf(err, "failed to hex decode eid data in %s", fileName))
}
if _, err := hex.Decode(buf[:], eout); err != nil {
return missing()
if _, err = hex.Decode(buf[:], eout); err != nil {
return failed(errors.Wrapf(err, "failed to hex decode eout data in %s", fileName))
}
i := 0
for i < len(esize) && esize[i] == ' ' {
@ -173,7 +181,7 @@ func (c *Cache) get(id ActionID) (Entry, error) {
}
size, err := strconv.ParseInt(string(esize[i:]), 10, 64)
if err != nil || size < 0 {
return missing()
return failed(fmt.Errorf("failed to parse esize int from %s with error %s", fileName, err))
}
i = 0
for i < len(etime) && etime[i] == ' ' {
@ -181,10 +189,12 @@ func (c *Cache) get(id ActionID) (Entry, error) {
}
tm, err := strconv.ParseInt(string(etime[i:]), 10, 64)
if err != nil || tm < 0 {
return missing()
return failed(fmt.Errorf("failed to parse etime int from %s with error %s", fileName, err))
}
c.used(c.fileName(id, "a"))
if err = c.used(fileName); err != nil {
return failed(errors.Wrapf(err, "failed to mark %s as used", fileName))
}
return Entry{buf, size, time.Unix(0, tm)}, nil
}
@ -196,7 +206,12 @@ func (c *Cache) GetFile(id ActionID) (file string, entry Entry, err error) {
if err != nil {
return "", Entry{}, err
}
file = c.OutputFile(entry.OutputID)
file, err = c.OutputFile(entry.OutputID)
if err != nil {
return "", Entry{}, err
}
info, err := os.Stat(file)
if err != nil || info.Size() != entry.Size {
return "", Entry{}, errMissing
@ -212,7 +227,16 @@ func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) {
if err != nil {
return nil, entry, err
}
data, _ := ioutil.ReadFile(c.OutputFile(entry.OutputID))
outputFile, err := c.OutputFile(entry.OutputID)
if err != nil {
return nil, entry, err
}
data, err := ioutil.ReadFile(outputFile)
if err != nil {
return nil, entry, err
}
if sha256.Sum256(data) != entry.OutputID {
return nil, entry, errMissing
}
@ -220,10 +244,12 @@ func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) {
}
// OutputFile returns the name of the cache file storing output with the given OutputID.
func (c *Cache) OutputFile(out OutputID) string {
func (c *Cache) OutputFile(out OutputID) (string, error) {
file := c.fileName(out, "d")
c.used(file)
return file
if err := c.used(file); err != nil {
return "", err
}
return file, nil
}
// Time constants for cache expiration.
@ -253,12 +279,21 @@ const (
// mtime is more than an hour old. This heuristic eliminates
// nearly all of the mtime updates that would otherwise happen,
// while still keeping the mtimes useful for cache trimming.
func (c *Cache) used(file string) {
func (c *Cache) used(file string) error {
info, err := os.Stat(file)
if err == nil && c.now().Sub(info.ModTime()) < mtimeInterval {
return
if err != nil {
return errors.Wrapf(err, "failed to stat file %s", file)
}
os.Chtimes(file, c.now(), c.now())
if c.now().Sub(info.ModTime()) < mtimeInterval {
return nil
}
if err := os.Chtimes(file, c.now(), c.now()); err != nil {
return errors.Wrapf(err, "failed to change time of file %s", file)
}
return nil
}
// Trim removes old cache entries that are likely not to be reused.
@ -285,7 +320,7 @@ func (c *Cache) Trim() {
// Ignore errors from here: if we don't write the complete timestamp, the
// cache will appear older than it is, and we'll trim it again next time.
renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())), 0666)
_ = renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())), 0666)
}
// trimSubdir trims a single cache subdirectory.
@ -367,7 +402,9 @@ func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify
os.Remove(file)
return err
}
os.Chtimes(file, c.now(), c.now()) // mainly for tests
if err = os.Chtimes(file, c.now(), c.now()); err != nil { // mainly for tests
return errors.Wrapf(err, "failed to change time of file %s", file)
}
return nil
}
@ -421,9 +458,12 @@ func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error {
info, err := os.Stat(name)
if err == nil && info.Size() == size {
// Check hash.
if f, err := os.Open(name); err == nil {
if f, openErr := os.Open(name); openErr == nil {
h := sha256.New()
io.Copy(h, f)
if _, copyErr := io.Copy(h, f); copyErr != nil {
return errors.Wrap(copyErr, "failed to copy to sha256")
}
f.Close()
var out2 OutputID
h.Sum(out2[:0])
@ -456,44 +496,49 @@ func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error {
// before returning, to avoid leaving bad bytes in the file.
// Copy file to f, but also into h to double-check hash.
if _, err := file.Seek(0, 0); err != nil {
f.Truncate(0)
if _, err = file.Seek(0, 0); err != nil {
_ = f.Truncate(0)
return err
}
h := sha256.New()
w := io.MultiWriter(f, h)
if _, err := io.CopyN(w, file, size-1); err != nil {
f.Truncate(0)
if _, err = io.CopyN(w, file, size-1); err != nil {
_ = f.Truncate(0)
return err
}
// Check last byte before writing it; writing it will make the size match
// what other processes expect to find and might cause them to start
// using the file.
buf := make([]byte, 1)
if _, err := file.Read(buf); err != nil {
f.Truncate(0)
if _, err = file.Read(buf); err != nil {
_ = f.Truncate(0)
return err
}
h.Write(buf)
if n, wErr := h.Write(buf); n != len(buf) {
return fmt.Errorf("wrote to hash %d/%d bytes with error %s", n, len(buf), wErr)
}
sum := h.Sum(nil)
if !bytes.Equal(sum, out[:]) {
f.Truncate(0)
_ = f.Truncate(0)
return fmt.Errorf("file content changed underfoot")
}
// Commit cache file entry.
if _, err := f.Write(buf); err != nil {
f.Truncate(0)
if _, err = f.Write(buf); err != nil {
_ = f.Truncate(0)
return err
}
if err := f.Close(); err != nil {
if err = f.Close(); err != nil {
// Data might not have been written,
// but file may look like it is the right size.
// To be extra careful, remove cached file.
os.Remove(name)
return err
}
os.Chtimes(name, c.now(), c.now()) // mainly for tests
if err = os.Chtimes(name, c.now(), c.now()); err != nil { // mainly for tests
return errors.Wrapf(err, "failed to change time of file %s", name)
}
return nil
}

View file

@ -39,7 +39,9 @@ func initDefaultCache() {
}
if _, err := os.Stat(filepath.Join(dir, "README")); err != nil {
// Best effort.
ioutil.WriteFile(filepath.Join(dir, "README"), []byte(cacheREADME), 0666)
if wErr := ioutil.WriteFile(filepath.Join(dir, "README"), []byte(cacheREADME), 0666); wErr != nil {
log.Fatalf("Failed to write README file to cache dir %s: %s", dir, err)
}
}
c, err := Open(dir)

View file

@ -42,11 +42,19 @@ func SetSalt(b []byte) {
// Subkey returns an action ID corresponding to mixing a parent
// action ID with a string description of the subkey.
func Subkey(parent ActionID, desc string) ActionID {
func Subkey(parent ActionID, desc string) (ActionID, error) {
h := sha256.New()
h.Write([]byte("subkey:"))
h.Write(parent[:])
h.Write([]byte(desc))
const subkeyPrefix = "subkey:"
if n, err := h.Write([]byte(subkeyPrefix)); n != len(subkeyPrefix) {
return ActionID{}, fmt.Errorf("wrote %d/%d bytes of subkey prefix with error %s", n, len(subkeyPrefix), err)
}
if n, err := h.Write(parent[:]); n != len(parent) {
return ActionID{}, fmt.Errorf("wrote %d/%d bytes of parent with error %s", n, len(parent), err)
}
if n, err := h.Write([]byte(desc)); n != len(desc) {
return ActionID{}, fmt.Errorf("wrote %d/%d bytes of desc with error %s", n, len(desc), err)
}
var out ActionID
h.Sum(out[:0])
if debugHash {
@ -57,21 +65,23 @@ func Subkey(parent ActionID, desc string) ActionID {
hashDebug.m[out] = fmt.Sprintf("subkey %x %q", parent, desc)
hashDebug.Unlock()
}
return out
return out, nil
}
// NewHash returns a new Hash.
// The caller is expected to Write data to it and then call Sum.
func NewHash(name string) *Hash {
func NewHash(name string) (*Hash, error) {
h := &Hash{h: sha256.New(), name: name}
if debugHash {
fmt.Fprintf(os.Stderr, "HASH[%s]\n", h.name)
}
h.Write(hashSalt)
if n, err := h.Write(hashSalt); n != len(hashSalt) {
return nil, fmt.Errorf("wrote %d/%d bytes of hash salt with error %s", n, len(hashSalt), err)
}
if verify {
h.buf = new(bytes.Buffer)
}
return h
return h, nil
}
// Write writes data to the running hash.

View file

@ -18,7 +18,7 @@ func TestHash(t *testing.T) {
hashSalt = oldSalt
}()
h := NewHash("alice")
h, _ := NewHash("alice")
h.Write([]byte("hello world"))
sum := fmt.Sprintf("%x", h.Sum())
want := "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"

View file

@ -61,7 +61,11 @@ func (c *Cache) Put(pkg *packages.Package, key string, data interface{}) error {
c.sw.TrackStage("key build", func() {
aID, err = c.pkgActionID(pkg)
if err == nil {
aID = cache.Subkey(aID, key)
subkey, subkeyErr := cache.Subkey(aID, key)
if subkeyErr != nil {
err = errors.Wrap(subkeyErr, "failed to build subkey")
}
aID = subkey
}
})
if err != nil {
@ -87,7 +91,11 @@ func (c *Cache) Get(pkg *packages.Package, key string, data interface{}) error {
c.sw.TrackStage("key build", func() {
aID, err = c.pkgActionID(pkg)
if err == nil {
aID = cache.Subkey(aID, key)
subkey, subkeyErr := cache.Subkey(aID, key)
if subkeyErr != nil {
err = errors.Wrap(subkeyErr, "failed to build subkey")
}
aID = subkey
}
})
if err != nil {
@ -123,7 +131,10 @@ func (c *Cache) pkgActionID(pkg *packages.Package) (cache.ActionID, error) {
return cache.ActionID{}, errors.Wrap(err, "failed to get package hash")
}
key := cache.NewHash("action ID")
key, err := cache.NewHash("action ID")
if err != nil {
return cache.ActionID{}, errors.Wrap(err, "failed to make a hash")
}
fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
fmt.Fprintf(key, "pkghash %s\n", hash)
@ -139,7 +150,11 @@ func (c *Cache) packageHash(pkg *packages.Package) (string, error) {
return cachedHash.(string), nil
}
key := cache.NewHash("package hash")
key, err := cache.NewHash("package hash")
if err != nil {
return "", errors.Wrap(err, "failed to make a hash")
}
fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
for _, f := range pkg.CompiledGoFiles {
c.ioSem <- struct{}{}

View file

@ -22,6 +22,8 @@ import (
"github.com/golangci/golangci-lint/internal/robustio"
)
const windowsOS = "windows"
func TestConcurrentReadsAndWrites(t *testing.T) {
dir, err := ioutil.TempDir("", "renameio")
if err != nil {
@ -115,7 +117,7 @@ func TestConcurrentReadsAndWrites(t *testing.T) {
}
var minWriteSuccesses int64 = attempts
if runtime.GOOS == "windows" {
if runtime.GOOS == windowsOS {
// Windows produces frequent "Access is denied" errors under heavy rename load.
// As long as those are the only errors and *some* of the writes succeed, we're happy.
minWriteSuccesses = attempts / 4
@ -128,7 +130,7 @@ func TestConcurrentReadsAndWrites(t *testing.T) {
}
var minReadSuccesses int64 = attempts
if runtime.GOOS == "windows" {
if runtime.GOOS == windowsOS {
// Windows produces frequent "Access is denied" errors under heavy rename load.
// As long as those are the only errors and *some* of the writes succeed, we're happy.
minReadSuccesses = attempts / 4

86
pkg/commands/cache.go Normal file
View file

@ -0,0 +1,86 @@
package commands
import (
"fmt"
"os"
"path/filepath"
"github.com/golangci/golangci-lint/pkg/fsutils"
"github.com/golangci/golangci-lint/internal/cache"
"github.com/spf13/cobra"
"github.com/golangci/golangci-lint/pkg/logutils"
)
// initCache registers the "cache" command group on the root command,
// together with its "clean" and "status" subcommands. Running the bare
// "cache" command just prints its help.
func (e *Executor) initCache() {
	showHelp := func(cmd *cobra.Command, args []string) {
		if len(args) != 0 {
			e.log.Fatalf("Usage: golangci-lint cache")
		}
		if err := cmd.Help(); err != nil {
			e.log.Fatalf("Can't run cache: %s", err)
		}
	}

	parentCmd := &cobra.Command{
		Use:   "cache",
		Short: "Cache control and information",
		Run:   showHelp,
	}
	e.rootCmd.AddCommand(parentCmd)

	subCommands := []*cobra.Command{
		{
			Use:   "clean",
			Short: "Clean cache",
			Run:   e.executeCleanCache,
		},
		{
			Use:   "status",
			Short: "Show cache status",
			Run:   e.executeCacheStatus,
		},
	}
	for _, sub := range subCommands {
		parentCmd.AddCommand(sub)
	}
	// TODO: add trim command?
}
// executeCleanCache implements "golangci-lint cache clean": it removes the
// whole default cache directory and exits the process with status 0.
func (e *Executor) executeCleanCache(_ *cobra.Command, args []string) {
	if len(args) != 0 {
		e.log.Fatalf("Usage: golangci-lint cache clean")
	}

	dir := cache.DefaultDir()
	err := os.RemoveAll(dir)
	if err != nil {
		e.log.Fatalf("Failed to remove dir %s: %s", dir, err)
	}

	os.Exit(0)
}
// executeCacheStatus implements "golangci-lint cache status": it prints the
// cache directory and, best-effort, its total size, then exits with status 0.
// A size-computation error is deliberately ignored — the size line is simply
// omitted in that case.
func (e *Executor) executeCacheStatus(_ *cobra.Command, args []string) {
	if len(args) != 0 {
		e.log.Fatalf("Usage: golangci-lint cache status")
	}

	dir := cache.DefaultDir()
	fmt.Fprintf(logutils.StdOut, "Dir: %s\n", dir)

	if sizeBytes, err := dirSizeBytes(dir); err == nil {
		fmt.Fprintf(logutils.StdOut, "Size: %s\n", fsutils.PrettifyBytesCount(sizeBytes))
	}

	os.Exit(0)
}
func dirSizeBytes(path string) (int64, error) {
var size int64
err := filepath.Walk(path, func(_ string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() {
size += info.Size()
}
return err
})
return size, err
}

View file

@ -1,7 +1,17 @@
package commands
import (
"bytes"
"crypto/sha256"
"encoding/json"
"fmt"
"io"
"os"
"github.com/golangci/golangci-lint/internal/cache"
"github.com/fatih/color"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
@ -83,6 +93,7 @@ func NewExecutor(version, commit, date string) *Executor {
e.initConfig()
e.initCompletion()
e.initVersion()
e.initCache()
// init e.cfg by values from config: flags parse will see these values
// like the default ones. It will overwrite them only if the same option
@ -118,6 +129,9 @@ func NewExecutor(version, commit, date string) *Executor {
e.loadGuard = load.NewGuard()
e.contextLoader = lint.NewContextLoader(e.cfg, e.log.Child("loader"), e.goenv,
e.lineCache, e.fileCache, e.pkgCache, e.loadGuard)
if err = e.initHashSalt(version); err != nil {
e.log.Fatalf("Failed to init hash salt: %s", err)
}
e.debugf("Initialized executor")
return e
}
@ -125,3 +139,59 @@ func NewExecutor(version, commit, date string) *Executor {
func (e *Executor) Execute() error {
return e.rootCmd.Execute()
}
// initHashSalt computes the global cache salt from the binary identity and
// the effective config, and installs it via cache.SetSalt so cache keys are
// invalidated when either the binary or the configuration changes.
func (e *Executor) initHashSalt(version string) error {
	binSalt, binErr := computeBinarySalt(version)
	if binErr != nil {
		return errors.Wrap(binErr, "failed to calculate binary salt")
	}

	configSalt, cfgErr := computeConfigSalt(e.cfg)
	if cfgErr != nil {
		return errors.Wrap(cfgErr, "failed to calculate config salt")
	}

	salt := make([]byte, 0, len(binSalt)+len(configSalt))
	salt = append(salt, binSalt...)
	salt = append(salt, configSalt...)
	cache.SetSalt(salt)
	return nil
}
// computeBinarySalt derives a salt identifying this binary. For tagged
// releases the version string itself is enough; for development builds
// ("" or "(devel)") it hashes the executable's contents instead. The
// "bin_salt" debug tag short-circuits to a fixed value for reproducibility.
func computeBinarySalt(version string) ([]byte, error) {
	isRelease := version != "" && version != "(devel)"
	if isRelease {
		return []byte(version), nil
	}

	if logutils.HaveDebugTag("bin_salt") {
		return []byte("debug"), nil
	}

	exePath, err := os.Executable()
	if err != nil {
		return nil, err
	}
	exeFile, err := os.Open(exePath)
	if err != nil {
		return nil, err
	}
	defer exeFile.Close()

	digest := sha256.New()
	if _, err = io.Copy(digest, exeFile); err != nil {
		return nil, err
	}
	return digest.Sum(nil), nil
}
// computeConfigSalt derives a salt from the effective configuration by
// hashing its JSON encoding, so cached results are invalidated whenever
// the config changes.
func computeConfigSalt(cfg *config.Config) ([]byte, error) {
	data, err := json.Marshal(cfg)
	if err != nil {
		return nil, errors.Wrap(err, "failed to json marshal config")
	}

	hasher := sha256.New()
	// hash.Hash.Write is documented to never fail, but keep the defensive
	// short-write check the surrounding code uses.
	if written, wErr := hasher.Write(data); written != len(data) {
		return nil, fmt.Errorf("failed to hash config bytes: wrote %d/%d bytes, error: %s", written, len(data), wErr)
	}
	return hasher.Sum(nil), nil
}

View file

@ -33,7 +33,7 @@ func (fc *FileCache) GetFileBytes(filePath string) ([]byte, error) {
return fileBytes, nil
}
func prettifyBytesCount(n int) string {
func PrettifyBytesCount(n int64) string {
const (
Multiplexer = 1024
KiB = 1 * Multiplexer
@ -54,14 +54,14 @@ func prettifyBytesCount(n int) string {
}
func (fc *FileCache) PrintStats(log logutils.Log) {
var size int
var size int64
var mapLen int
fc.files.Range(func(_, fileBytes interface{}) bool {
mapLen++
size += len(fileBytes.([]byte))
size += int64(len(fileBytes.([]byte)))
return true
})
log.Infof("File cache stats: %d entries of total size %s", mapLen, prettifyBytesCount(size))
log.Infof("File cache stats: %d entries of total size %s", mapLen, PrettifyBytesCount(size))
}

View file

@ -15,10 +15,10 @@ import (
func NewDeadcode() *goanalysis.Linter {
const linterName = "deadcode"
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (interface{}, error) {
prog := goanalysis.MakeFakeLoaderProgram(pass)
@ -26,13 +26,13 @@ func NewDeadcode() *goanalysis.Linter {
if err != nil {
return nil, err
}
res := make([]result.Issue, 0, len(issues))
res := make([]goanalysis.Issue, 0, len(issues))
for _, i := range issues {
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: i.Pos,
Text: fmt.Sprintf("%s is unused", formatCode(i.UnusedIdentName, nil)),
FromLinter: linterName,
})
}, pass))
}
mu.Lock()
resIssues = append(resIssues, res...)
@ -46,7 +46,7 @@ func NewDeadcode() *goanalysis.Linter {
"Finds unused code",
[]*analysis.Analyzer{analyzer},
nil,
).WithIssuesReporter(func(*linter.Context) []result.Issue {
).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -49,10 +49,10 @@ func setupDepguardPackages(dg *depguard.Depguard, lintCtx *linter.Context) {
func NewDepguard() *goanalysis.Linter {
const linterName = "depguard"
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -88,17 +88,17 @@ func NewDepguard() *goanalysis.Linter {
if dg.ListType == depguard.LTWhitelist {
msgSuffix = "is not in the whitelist"
}
res := make([]result.Issue, 0, len(issues))
res := make([]goanalysis.Issue, 0, len(issues))
for _, i := range issues {
userSuppliedMsgSuffix := dgSettings.PackagesWithErrorMessage[i.PackageName]
if userSuppliedMsgSuffix != "" {
userSuppliedMsgSuffix = ": " + userSuppliedMsgSuffix
}
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: i.Position,
Text: fmt.Sprintf("%s %s%s", formatCode(i.PackageName, lintCtx.Cfg), msgSuffix, userSuppliedMsgSuffix),
FromLinter: linterName,
})
}, pass))
}
mu.Lock()
resIssues = append(resIssues, res...)
@ -106,7 +106,7 @@ func NewDepguard() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -17,10 +17,10 @@ const dogsledLinterName = "dogsled"
func NewDogsled() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: dogsledLinterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -30,14 +30,16 @@ func NewDogsled() *goanalysis.Linter {
nil,
).WithContextSetter(func(lintCtx *linter.Context) {
analyzer.Run = func(pass *analysis.Pass) (interface{}, error) {
var pkgIssues []result.Issue
var pkgIssues []goanalysis.Issue
for _, f := range pass.Files {
v := returnsVisitor{
maxBlanks: lintCtx.Settings().Dogsled.MaxBlankIdentifiers,
f: pass.Fset,
}
ast.Walk(&v, f)
pkgIssues = append(pkgIssues, v.issues...)
for i := range v.issues {
pkgIssues = append(pkgIssues, goanalysis.NewIssue(&v.issues[i], pass))
}
}
mu.Lock()
@ -46,7 +48,7 @@ func NewDogsled() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -19,10 +19,10 @@ const duplLinterName = "dupl"
func NewDupl() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: duplLinterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -47,7 +47,7 @@ func NewDupl() *goanalysis.Linter {
return nil, nil
}
res := make([]result.Issue, 0, len(issues))
res := make([]goanalysis.Issue, 0, len(issues))
for _, i := range issues {
toFilename, err := fsutils.ShortestRelPath(i.To.Filename(), "")
if err != nil {
@ -57,7 +57,7 @@ func NewDupl() *goanalysis.Linter {
text := fmt.Sprintf("%d-%d lines are duplicate of %s",
i.From.LineStart(), i.From.LineEnd(),
formatCode(dupl, lintCtx.Cfg))
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: token.Position{
Filename: i.From.Filename(),
Line: i.From.LineStart(),
@ -68,7 +68,7 @@ func NewDupl() *goanalysis.Linter {
},
Text: text,
FromLinter: duplLinterName,
})
}, pass))
}
mu.Lock()
@ -77,7 +77,7 @@ func NewDupl() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -24,9 +24,9 @@ import (
func NewErrcheck() *goanalysis.Linter {
const linterName = "errcheck"
var mu sync.Mutex
var res []result.Issue
var res []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -51,7 +51,7 @@ func NewErrcheck() *goanalysis.Linter {
return nil, nil
}
issues := make([]result.Issue, 0, len(errcheckIssues))
issues := make([]goanalysis.Issue, 0, len(errcheckIssues))
for _, i := range errcheckIssues {
var text string
if i.FuncName != "" {
@ -59,18 +59,18 @@ func NewErrcheck() *goanalysis.Linter {
} else {
text = "Error return value is not checked"
}
issues = append(issues, result.Issue{
issues = append(issues, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
FromLinter: linterName,
Text: text,
Pos: i.Pos,
})
}, pass))
}
mu.Lock()
res = append(res, issues...)
mu.Unlock()
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return res
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -17,10 +17,10 @@ const funlenLinterName = "funlen"
func NewFunlen() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: funlenLinterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -40,16 +40,16 @@ func NewFunlen() *goanalysis.Linter {
return nil, nil
}
res := make([]result.Issue, len(issues))
res := make([]goanalysis.Issue, len(issues))
for k, i := range issues {
res[k] = result.Issue{
res[k] = goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: token.Position{
Filename: i.Pos.Filename,
Line: i.Pos.Line,
},
Text: strings.TrimRight(i.Message, "\n"),
FromLinter: funlenLinterName,
}
}, pass)
}
mu.Lock()
@ -58,7 +58,7 @@ func NewFunlen() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -0,0 +1,29 @@
package goanalysis
import (
"go/token"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/result"
)
// Issue is a result.Issue paired with the analysis pass that produced it.
type Issue struct {
	result.Issue
	// Pass is the analysis pass from which the issue was reported.
	Pass *analysis.Pass
}
// NewIssue wraps the given result.Issue together with the analysis pass
// that produced it.
func NewIssue(i *result.Issue, pass *analysis.Pass) Issue {
	wrapped := Issue{Pass: pass}
	wrapped.Issue = *i
	return wrapped
}
// EncodingIssue is the serializable subset of a result.Issue that is stored
// in the package cache (these are exactly the fields round-tripped by
// saveIssuesToCache and loadIssuesFromCache).
type EncodingIssue struct {
	FromLinter  string
	Text        string
	Pos         token.Position
	LineRange   *result.Range
	Replacement *result.Replacement
}

View file

@ -4,7 +4,13 @@ import (
"context"
"flag"
"fmt"
"runtime"
"sort"
"strings"
"sync"
"time"
"github.com/golangci/golangci-lint/pkg/logutils"
"golang.org/x/tools/go/packages"
@ -30,6 +36,8 @@ const (
LoadModeWholeProgram
)
var issuesCacheDebugf = logutils.Debug("goanalysis/issues/cache")
func (loadMode LoadMode) String() string {
switch loadMode {
case LoadModeNone:
@ -45,14 +53,14 @@ func (loadMode LoadMode) String() string {
}
type Linter struct {
name, desc string
analyzers []*analysis.Analyzer
cfg map[string]map[string]interface{}
issuesReporter func(*linter.Context) []result.Issue
contextSetter func(*linter.Context)
loadMode LoadMode
useOriginalPackages bool
isTypecheckMode bool
name, desc string
analyzers []*analysis.Analyzer
cfg map[string]map[string]interface{}
issuesReporter func(*linter.Context) []Issue
contextSetter func(*linter.Context)
loadMode LoadMode
needUseOriginalPackages bool
isTypecheckModeOn bool
}
func NewLinter(name, desc string, analyzers []*analysis.Analyzer, cfg map[string]map[string]interface{}) *Linter {
@ -60,11 +68,11 @@ func NewLinter(name, desc string, analyzers []*analysis.Analyzer, cfg map[string
}
func (lnt *Linter) UseOriginalPackages() {
lnt.useOriginalPackages = true
lnt.needUseOriginalPackages = true
}
func (lnt *Linter) SetTypecheckMode() {
lnt.isTypecheckMode = true
lnt.isTypecheckModeOn = true
}
func (lnt *Linter) LoadMode() LoadMode {
@ -76,7 +84,7 @@ func (lnt *Linter) WithLoadMode(loadMode LoadMode) *Linter {
return lnt
}
func (lnt *Linter) WithIssuesReporter(r func(*linter.Context) []result.Issue) *Linter {
func (lnt *Linter) WithIssuesReporter(r func(*linter.Context) []Issue) *Linter {
lnt.issuesReporter = r
return lnt
}
@ -198,6 +206,7 @@ func buildIssuesFromErrorsForTypecheckMode(errs []error, lintCtx *linter.Context
uniqReportedIssues[err.Msg] = true
lintCtx.Log.Errorf("typechecking error: %s", err.Msg)
} else {
i.Pkg = itErr.Pkg // to save to cache later
issues = append(issues, *i)
}
}
@ -209,48 +218,249 @@ func buildIssues(diags []Diagnostic, linterNameBuilder func(diag *Diagnostic) st
var issues []result.Issue
for i := range diags {
diag := &diags[i]
linterName := linterNameBuilder(diag)
var text string
if diag.Analyzer.Name == TheOnlyAnalyzerName {
if diag.Analyzer.Name == linterName {
text = diag.Message
} else {
text = fmt.Sprintf("%s: %s", diag.Analyzer.Name, diag.Message)
}
issues = append(issues, result.Issue{
FromLinter: linterNameBuilder(diag),
FromLinter: linterName,
Text: text,
Pos: diag.Position,
Pkg: diag.Pkg,
})
}
return issues
}
func (lnt *Linter) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) {
func (lnt *Linter) preRun(lintCtx *linter.Context) error {
if err := analysis.Validate(lnt.analyzers); err != nil {
return nil, errors.Wrap(err, "failed to validate analyzers")
return errors.Wrap(err, "failed to validate analyzers")
}
if err := lnt.configure(); err != nil {
return nil, errors.Wrap(err, "failed to configure analyzers")
return errors.Wrap(err, "failed to configure analyzers")
}
if lnt.contextSetter != nil {
lnt.contextSetter(lintCtx)
}
loadMode := lnt.loadMode
runner := newRunner(lnt.name, lintCtx.Log.Child("goanalysis"),
lintCtx.PkgCache, lintCtx.LoadGuard, loadMode)
return nil
}
// getName returns the linter's name (part of the runAnalyzersConfig contract).
func (lnt *Linter) getName() string {
	return lnt.name
}

// getLinterNameForDiagnostic returns the linter name to attribute to any
// diagnostic; for a plain Linter every diagnostic maps to the same name.
func (lnt *Linter) getLinterNameForDiagnostic(*Diagnostic) string {
	return lnt.name
}

// getAnalyzers returns the analyzers this linter runs.
func (lnt *Linter) getAnalyzers() []*analysis.Analyzer {
	return lnt.analyzers
}

// useOriginalPackages reports whether analysis should run on the original
// (non-copied) packages, as requested via UseOriginalPackages.
func (lnt *Linter) useOriginalPackages() bool {
	return lnt.needUseOriginalPackages
}

// isTypecheckMode reports whether typecheck mode was enabled via SetTypecheckMode.
func (lnt *Linter) isTypecheckMode() bool {
	return lnt.isTypecheckModeOn
}

// reportIssues invokes the configured issues reporter, if any; otherwise it
// returns nil.
func (lnt *Linter) reportIssues(lintCtx *linter.Context) []Issue {
	if lnt.issuesReporter != nil {
		return lnt.issuesReporter(lintCtx)
	}
	return nil
}

// getLoadMode returns the package load mode this linter requires.
func (lnt *Linter) getLoadMode() LoadMode {
	return lnt.loadMode
}
// runAnalyzersConfig abstracts everything runAnalyzers needs to know about a
// linter; *Linter satisfies it via the accessor methods above.
type runAnalyzersConfig interface {
	getName() string
	getLinterNameForDiagnostic(*Diagnostic) string
	getAnalyzers() []*analysis.Analyzer
	useOriginalPackages() bool
	isTypecheckMode() bool
	reportIssues(*linter.Context) []Issue
	getLoadMode() LoadMode
}
// getIssuesCacheKey builds the package-cache subkey under which lint results
// for this set of analyzers are stored; analyzersHashID (defined elsewhere in
// this package) makes the key specific to the analyzer set.
func getIssuesCacheKey(analyzers []*analysis.Analyzer) string {
	return "lint/result:" + analyzersHashID(analyzers)
}
// saveIssuesToCache persists the issues found for each freshly analyzed
// package into the package cache. Packages whose results were served from
// the cache (pkgsFromCache) are skipped. Failures to store a single package
// are logged and do not abort the save of the remaining packages.
func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool,
	issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer) {
	startedAt := time.Now()

	// Bucket issues by the package that produced them.
	grouped := map[*packages.Package][]result.Issue{}
	for idx := range issues {
		issue := &issues[idx]
		grouped[issue.Pkg] = append(grouped[issue.Pkg], *issue)
	}

	cacheKey := getIssuesCacheKey(analyzers)
	totalSaved := 0
	for _, pkg := range allPkgs {
		if pkgsFromCache[pkg] {
			continue // results for this package came from the cache already
		}

		pkgIssues := grouped[pkg]
		// Strip issues down to their serializable fields before storing.
		encoded := make([]EncodingIssue, 0, len(pkgIssues))
		for idx := range pkgIssues {
			issue := &pkgIssues[idx]
			encoded = append(encoded, EncodingIssue{
				FromLinter:  issue.FromLinter,
				Text:        issue.Text,
				Pos:         issue.Pos,
				LineRange:   issue.LineRange,
				Replacement: issue.Replacement,
			})
		}

		totalSaved += len(encoded)
		if err := lintCtx.PkgCache.Put(pkg, cacheKey, encoded); err != nil {
			lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err)
		} else {
			issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues))
		}
	}
	issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", totalSaved, len(allPkgs), time.Since(startedAt))
}
//nolint:gocritic
func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
analyzers []*analysis.Analyzer) ([]result.Issue, map[*packages.Package]bool) {
startedAt := time.Now()
lintResKey := getIssuesCacheKey(analyzers)
type cacheRes struct {
issues []result.Issue
loadErr error
}
pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs))
for _, pkg := range pkgs {
pkgToCacheRes[pkg] = &cacheRes{}
}
workerCount := runtime.GOMAXPROCS(-1)
var wg sync.WaitGroup
wg.Add(workerCount)
pkgCh := make(chan *packages.Package, len(pkgs))
for i := 0; i < workerCount; i++ {
go func() {
defer wg.Done()
for pkg := range pkgCh {
var pkgIssues []EncodingIssue
err := lintCtx.PkgCache.Get(pkg, lintResKey, &pkgIssues)
cacheRes := pkgToCacheRes[pkg]
cacheRes.loadErr = err
if err != nil {
continue
}
if len(pkgIssues) == 0 {
continue
}
issues := make([]result.Issue, 0, len(pkgIssues))
for _, i := range pkgIssues {
issues = append(issues, result.Issue{
FromLinter: i.FromLinter,
Text: i.Text,
Pos: i.Pos,
LineRange: i.LineRange,
Replacement: i.Replacement,
Pkg: pkg,
})
}
cacheRes.issues = issues
}
}()
}
for _, pkg := range pkgs {
pkgCh <- pkg
}
close(pkgCh)
wg.Wait()
loadedIssuesCount := 0
var issues []result.Issue
pkgsFromCache := map[*packages.Package]bool{}
for pkg, cacheRes := range pkgToCacheRes {
if cacheRes.loadErr == nil {
loadedIssuesCount += len(cacheRes.issues)
pkgsFromCache[pkg] = true
issues = append(issues, cacheRes.issues...)
issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues))
} else {
issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr)
}
}
issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages",
loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs))
return issues, pkgsFromCache
}
func runAnalyzers(cfg runAnalyzersConfig, lintCtx *linter.Context) ([]result.Issue, error) {
runner := newRunner(cfg.getName(), lintCtx.Log.Child("goanalysis"),
lintCtx.PkgCache, lintCtx.LoadGuard, cfg.getLoadMode())
pkgs := lintCtx.Packages
if lnt.useOriginalPackages {
if cfg.useOriginalPackages() {
pkgs = lintCtx.OriginalPackages
}
diags, errs := runner.run(lnt.analyzers, pkgs)
issues, pkgsFromCache := loadIssuesFromCache(pkgs, lintCtx, cfg.getAnalyzers())
var pkgsToAnalyze []*packages.Package
for _, pkg := range pkgs {
if !pkgsFromCache[pkg] {
pkgsToAnalyze = append(pkgsToAnalyze, pkg)
}
}
linterNameBuilder := func(*Diagnostic) string { return lnt.Name() }
if lnt.isTypecheckMode {
return buildIssuesFromErrorsForTypecheckMode(errs, lintCtx)
diags, errs, passToPkg := runner.run(cfg.getAnalyzers(), pkgsToAnalyze)
defer func() {
if len(errs) == 0 {
// If we try to save to cache even if we have compilation errors
// we won't see them on repeated runs.
saveIssuesToCache(pkgs, pkgsFromCache, issues, lintCtx, cfg.getAnalyzers())
}
}()
buildAllIssues := func() []result.Issue {
var retIssues []result.Issue
reportedIssues := cfg.reportIssues(lintCtx)
for i := range reportedIssues {
issue := &reportedIssues[i].Issue
if issue.Pkg == nil {
issue.Pkg = passToPkg[reportedIssues[i].Pass]
}
retIssues = append(retIssues, *issue)
}
retIssues = append(retIssues, buildIssues(diags, cfg.getLinterNameForDiagnostic)...)
return retIssues
}
if cfg.isTypecheckMode() {
errIssues, err := buildIssuesFromErrorsForTypecheckMode(errs, lintCtx)
if err != nil {
return nil, err
}
issues = append(issues, errIssues...)
issues = append(issues, buildAllIssues()...)
return issues, nil
}
// Don't print all errs: they can duplicate.
@ -258,12 +468,24 @@ func (lnt *Linter) Run(ctx context.Context, lintCtx *linter.Context) ([]result.I
return nil, errs[0]
}
var issues []result.Issue
if lnt.issuesReporter != nil {
issues = append(issues, lnt.issuesReporter(lintCtx)...)
} else {
issues = buildIssues(diags, linterNameBuilder)
}
issues = append(issues, buildAllIssues()...)
return issues, nil
}
func (lnt *Linter) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) {
if err := lnt.preRun(lintCtx); err != nil {
return nil, err
}
return runAnalyzers(lnt, lintCtx)
}
func analyzersHashID(analyzers []*analysis.Analyzer) string {
names := make([]string, 0, len(analyzers))
for _, a := range analyzers {
names = append(names, a.Name)
}
sort.Strings(names)
return strings.Join(names, ",")
}

View file

@ -11,11 +11,14 @@ import (
)
type MetaLinter struct {
linters []*Linter
linters []*Linter
analyzerToLinterName map[*analysis.Analyzer]string
}
func NewMetaLinter(linters []*Linter) *MetaLinter {
return &MetaLinter{linters: linters}
ml := &MetaLinter{linters: linters}
ml.analyzerToLinterName = ml.getAnalyzerToLinterNameMapping()
return ml
}
func (ml MetaLinter) Name() string {
@ -28,7 +31,7 @@ func (ml MetaLinter) Desc() string {
func (ml MetaLinter) isTypecheckMode() bool {
for _, linter := range ml.linters {
if linter.isTypecheckMode {
if linter.isTypecheckMode() {
return true
}
}
@ -45,15 +48,7 @@ func (ml MetaLinter) getLoadMode() LoadMode {
return loadMode
}
func (ml MetaLinter) runContextSetters(lintCtx *linter.Context) {
for _, linter := range ml.linters {
if linter.contextSetter != nil {
linter.contextSetter(lintCtx)
}
}
}
func (ml MetaLinter) getAllAnalyzers() []*analysis.Analyzer {
func (ml MetaLinter) getAnalyzers() []*analysis.Analyzer {
var allAnalyzers []*analysis.Analyzer
for _, linter := range ml.linters {
allAnalyzers = append(allAnalyzers, linter.analyzers...)
@ -61,6 +56,28 @@ func (ml MetaLinter) getAllAnalyzers() []*analysis.Analyzer {
return allAnalyzers
}
func (ml MetaLinter) getName() string {
return "metalinter"
}
func (ml MetaLinter) useOriginalPackages() bool {
return false // `unused` can't be run by this metalinter
}
func (ml MetaLinter) reportIssues(lintCtx *linter.Context) []Issue {
var ret []Issue
for _, lnt := range ml.linters {
if lnt.issuesReporter != nil {
ret = append(ret, lnt.issuesReporter(lintCtx)...)
}
}
return ret
}
func (ml MetaLinter) getLinterNameForDiagnostic(diag *Diagnostic) string {
return ml.analyzerToLinterName[diag.Analyzer]
}
func (ml MetaLinter) getAnalyzerToLinterNameMapping() map[*analysis.Analyzer]string {
analyzerToLinterName := map[*analysis.Analyzer]string{}
for _, linter := range ml.linters {
@ -71,65 +88,12 @@ func (ml MetaLinter) getAnalyzerToLinterNameMapping() map[*analysis.Analyzer]str
return analyzerToLinterName
}
func (ml MetaLinter) configure() error {
for _, linter := range ml.linters {
if err := linter.configure(); err != nil {
return errors.Wrapf(err, "failed to configure analyzers of %s", linter.Name())
}
}
return nil
}
func (ml MetaLinter) validate() error {
for _, linter := range ml.linters {
if err := analysis.Validate(linter.analyzers); err != nil {
return errors.Wrapf(err, "failed to validate analyzers of %s", linter.Name())
}
}
return nil
}
func (ml MetaLinter) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) {
if err := ml.validate(); err != nil {
return nil, err
}
if err := ml.configure(); err != nil {
return nil, err
}
ml.runContextSetters(lintCtx)
analyzerToLinterName := ml.getAnalyzerToLinterNameMapping()
runner := newRunner("metalinter", lintCtx.Log.Child("goanalysis"),
lintCtx.PkgCache, lintCtx.LoadGuard, ml.getLoadMode())
diags, errs := runner.run(ml.getAllAnalyzers(), lintCtx.Packages)
buildAllIssues := func() []result.Issue {
linterNameBuilder := func(diag *Diagnostic) string { return analyzerToLinterName[diag.Analyzer] }
issues := buildIssues(diags, linterNameBuilder)
for _, linter := range ml.linters {
if linter.issuesReporter != nil {
issues = append(issues, linter.issuesReporter(lintCtx)...)
}
for _, linter := range ml.linters {
if err := linter.preRun(lintCtx); err != nil {
return nil, errors.Wrapf(err, "failed to pre-run %s", linter.Name())
}
return issues
}
if ml.isTypecheckMode() {
issues, err := buildIssuesFromErrorsForTypecheckMode(errs, lintCtx)
if err != nil {
return nil, err
}
return append(issues, buildAllIssues()...), nil
}
// Don't print all errs: they can duplicate.
if len(errs) != 0 {
return nil, errs[0]
}
return buildAllIssues(), nil
return runAnalyzers(ml, lintCtx)
}

View file

@ -69,14 +69,17 @@ type Diagnostic struct {
analysis.Diagnostic
Analyzer *analysis.Analyzer
Position token.Position
Pkg *packages.Package
}
type runner struct {
log logutils.Log
prefix string // ensure unique analyzer names
pkgCache *pkgcache.Cache
loadGuard *load.Guard
loadMode LoadMode
log logutils.Log
prefix string // ensure unique analyzer names
pkgCache *pkgcache.Cache
loadGuard *load.Guard
loadMode LoadMode
passToPkg map[*analysis.Pass]*packages.Package
passToPkgGuard sync.Mutex
}
func newRunner(prefix string, logger logutils.Log, pkgCache *pkgcache.Cache, loadGuard *load.Guard, loadMode LoadMode) *runner {
@ -86,6 +89,7 @@ func newRunner(prefix string, logger logutils.Log, pkgCache *pkgcache.Cache, loa
pkgCache: pkgCache,
loadGuard: loadGuard,
loadMode: loadMode,
passToPkg: map[*analysis.Pass]*packages.Package{},
}
}
@ -95,12 +99,14 @@ func newRunner(prefix string, logger logutils.Log, pkgCache *pkgcache.Cache, loa
// It provides most of the logic for the main functions of both the
// singlechecker and the multi-analysis commands.
// It returns the appropriate exit code.
func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages.Package) ([]Diagnostic, []error) {
func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages.Package) ([]Diagnostic,
[]error, map[*analysis.Pass]*packages.Package) {
debugf("Analyzing %d packages on load mode %s", len(initialPackages), r.loadMode)
defer r.pkgCache.Trim()
roots := r.analyze(initialPackages, analyzers)
return extractDiagnostics(roots)
diags, errs := extractDiagnostics(roots)
return diags, errs, r.passToPkg
}
type actKey struct {
@ -138,9 +144,7 @@ func (r *runner) makeAction(a *analysis.Analyzer, pkg *packages.Package,
act = actAlloc.alloc()
act.a = a
act.pkg = pkg
act.log = r.log
act.prefix = r.prefix
act.pkgCache = r.pkgCache
act.r = r
act.isInitialPkg = initialPkgs[pkg]
act.needAnalyzeSource = initialPkgs[pkg]
act.analysisDoneCh = make(chan struct{})
@ -362,7 +366,13 @@ func extractDiagnostics(roots []*action) (retDiags []Diagnostic, retErrors []err
}
seen[k] = true
retDiags = append(retDiags, Diagnostic{Diagnostic: diag, Analyzer: act.a, Position: posn})
retDiag := Diagnostic{
Diagnostic: diag,
Analyzer: act.a,
Position: posn,
Pkg: act.pkg,
}
retDiags = append(retDiags, retDiag)
}
}
}
@ -404,9 +414,7 @@ type action struct {
result interface{}
diagnostics []analysis.Diagnostic
err error
log logutils.Log
prefix string
pkgCache *pkgcache.Cache
r *runner
analysisDoneCh chan struct{}
loadCachedFactsDone bool
loadCachedFactsOk bool
@ -483,22 +491,8 @@ func (act *action) analyze() {
return
}
// TODO(adonovan): uncomment this during profiling.
// It won't build pre-go1.11 but conditional compilation
// using build tags isn't warranted.
//
// ctx, task := trace.NewTask(context.Background(), "exec")
// trace.Log(ctx, "pass", act.String())
// defer task.End()
// Record time spent in this node but not its dependencies.
// In parallel mode, due to GC/scheduler contention, the
// time is 5x higher than in sequential mode, even with a
// semaphore limiting the number of threads here.
// So use -debug=tp.
defer func(now time.Time) {
analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.prefix, act.a.Name, act.pkg.Name, time.Since(now))
analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.r.prefix, act.a.Name, act.pkg.Name, time.Since(now))
}(time.Now())
// Report an error if any dependency failed.
@ -552,6 +546,9 @@ func (act *action) analyze() {
AllPackageFacts: act.allPackageFacts,
}
act.pass = pass
act.r.passToPkgGuard.Lock()
act.r.passToPkg[pass] = act.pkg
act.r.passToPkgGuard.Unlock()
var err error
if act.pkg.IllTyped {
@ -574,7 +571,7 @@ func (act *action) analyze() {
pass.ExportPackageFact = nil
if err := act.persistFactsToCache(); err != nil {
act.log.Warnf("Failed to persist facts to cache: %s", err)
act.r.log.Warnf("Failed to persist facts to cache: %s", err)
}
}
@ -598,7 +595,7 @@ func inheritFacts(act, dep *action) {
var err error
fact, err = codeFact(fact)
if err != nil {
act.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
}
}
@ -618,7 +615,7 @@ func inheritFacts(act, dep *action) {
var err error
fact, err = codeFact(fact)
if err != nil {
act.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
}
}
@ -695,7 +692,7 @@ func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
// exportObjectFact implements Pass.ExportObjectFact.
func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
if obj.Pkg() != act.pkg.Types {
act.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package",
act.r.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package",
act.a, act.pkg, obj, fact)
}
@ -756,7 +753,7 @@ func (act *action) allPackageFacts() []analysis.PackageFact {
func (act *action) factType(fact analysis.Fact) reflect.Type {
t := reflect.TypeOf(fact)
if t.Kind() != reflect.Ptr {
act.log.Fatalf("invalid Fact type: got %T, want pointer", t)
act.r.log.Fatalf("invalid Fact type: got %T, want pointer", t)
}
return t
}
@ -806,15 +803,15 @@ func (act *action) persistFactsToCache() error {
factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name)
key := fmt.Sprintf("%s/facts", analyzer.Name)
return act.pkgCache.Put(act.pkg, key, facts)
return act.r.pkgCache.Put(act.pkg, key, facts)
}
func (act *action) loadPersistedFacts() bool {
var facts []Fact
key := fmt.Sprintf("%s/facts", act.a.Name)
if err := act.pkgCache.Get(act.pkg, key, &facts); err != nil {
if err := act.r.pkgCache.Get(act.pkg, key, &facts); err != nil {
if err != pkgcache.ErrMissing {
act.log.Warnf("Failed to get persisted facts: %s", err)
act.r.log.Warnf("Failed to get persisted facts: %s", err)
}
factsCacheDebugf("No cached facts for package %q and analyzer %s", act.pkg.Name, act.a.Name)

View file

@ -19,15 +19,18 @@ const gochecknoglobalsName = "gochecknoglobals"
//nolint:dupl
func NewGochecknoglobals() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: gochecknoglobalsName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (interface{}, error) {
var res []result.Issue
var res []goanalysis.Issue
for _, file := range pass.Files {
res = append(res, checkFileForGlobals(file, pass.Fset)...)
fileIssues := checkFileForGlobals(file, pass.Fset)
for i := range fileIssues {
res = append(res, goanalysis.NewIssue(&fileIssues[i], pass))
}
}
if len(res) == 0 {
return nil, nil
@ -45,7 +48,7 @@ func NewGochecknoglobals() *goanalysis.Linter {
"Checks that no globals are present in Go code",
[]*analysis.Analyzer{analyzer},
nil,
).WithIssuesReporter(func(*linter.Context) []result.Issue {
).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -18,15 +18,18 @@ const gochecknoinitsName = "gochecknoinits"
//nolint:dupl
func NewGochecknoinits() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: gochecknoinitsName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (interface{}, error) {
var res []result.Issue
var res []goanalysis.Issue
for _, file := range pass.Files {
res = append(res, checkFileForInits(file, pass.Fset)...)
fileIssues := checkFileForInits(file, pass.Fset)
for i := range fileIssues {
res = append(res, goanalysis.NewIssue(&fileIssues[i], pass))
}
}
if len(res) == 0 {
return nil, nil
@ -44,7 +47,7 @@ func NewGochecknoinits() *goanalysis.Linter {
"Checks that no init functions are present in Go code",
[]*analysis.Analyzer{analyzer},
nil,
).WithIssuesReporter(func(*linter.Context) []result.Issue {
).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -18,7 +18,7 @@ const gocognitName = "gocognit"
func NewGocognit() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
@ -43,18 +43,18 @@ func NewGocognit() *goanalysis.Linter {
return stats[i].Complexity > stats[j].Complexity
})
res := make([]result.Issue, 0, len(stats))
res := make([]goanalysis.Issue, 0, len(stats))
for _, s := range stats {
if s.Complexity <= lintCtx.Settings().Gocognit.MinComplexity {
break // Break as the stats is already sorted from greatest to least
}
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: s.Pos,
Text: fmt.Sprintf("cognitive complexity %d of func %s is high (> %d)",
s.Complexity, formatCode(s.FuncName, lintCtx.Cfg), lintCtx.Settings().Gocognit.MinComplexity),
FromLinter: gocognitName,
})
}, pass))
}
mu.Lock()
@ -63,7 +63,7 @@ func NewGocognit() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -16,10 +16,10 @@ const goconstName = "goconst"
func NewGoconst() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: goconstName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -40,12 +40,12 @@ func NewGoconst() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}
func checkConstants(pass *analysis.Pass, lintCtx *linter.Context) ([]result.Issue, error) {
func checkConstants(pass *analysis.Pass, lintCtx *linter.Context) ([]goanalysis.Issue, error) {
cfg := goconstAPI.Config{
MatchWithConstants: true,
MinStringLength: lintCtx.Settings().Goconst.MinStringLen,
@ -61,7 +61,7 @@ func checkConstants(pass *analysis.Pass, lintCtx *linter.Context) ([]result.Issu
return nil, nil
}
res := make([]result.Issue, 0, len(goconstIssues))
res := make([]goanalysis.Issue, 0, len(goconstIssues))
for _, i := range goconstIssues {
textBegin := fmt.Sprintf("string %s has %d occurrences", formatCode(i.Str, lintCtx.Cfg), i.OccurencesCount)
var textEnd string
@ -70,11 +70,11 @@ func checkConstants(pass *analysis.Pass, lintCtx *linter.Context) ([]result.Issu
} else {
textEnd = fmt.Sprintf(", but such constant %s already exists", formatCode(i.MatchingConst, lintCtx.Cfg))
}
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: i.Pos,
Text: textBegin + textEnd,
FromLinter: goconstName,
})
}, pass))
}
return res, nil

View file

@ -23,12 +23,12 @@ const gocriticName = "gocritic"
func NewGocritic() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
sizes := types.SizesFor("gc", runtime.GOARCH)
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: gocriticName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -45,7 +45,11 @@ func NewGocritic() *goanalysis.Linter {
}
lintpackCtx.SetPackageInfo(pass.TypesInfo, pass.Pkg)
res := runGocriticOnPackage(lintpackCtx, enabledCheckers, pass.Files)
var res []goanalysis.Issue
pkgIssues := runGocriticOnPackage(lintpackCtx, enabledCheckers, pass.Files)
for i := range pkgIssues {
res = append(res, goanalysis.NewIssue(&pkgIssues[i], pass))
}
if len(res) == 0 {
return nil, nil
}
@ -56,7 +60,7 @@ func NewGocritic() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -18,10 +18,10 @@ const gocycloName = "gocyclo"
func NewGocyclo() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: gocycloName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -43,18 +43,18 @@ func NewGocyclo() *goanalysis.Linter {
return stats[i].Complexity > stats[j].Complexity
})
res := make([]result.Issue, 0, len(stats))
res := make([]goanalysis.Issue, 0, len(stats))
for _, s := range stats {
if s.Complexity <= lintCtx.Settings().Gocyclo.MinComplexity {
break // Break as the stats is already sorted from greatest to least
}
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: s.Pos,
Text: fmt.Sprintf("cyclomatic complexity %d of func %s is high (> %d)",
s.Complexity, formatCode(s.FuncName, lintCtx.Cfg), lintCtx.Settings().Gocyclo.MinComplexity),
FromLinter: gocycloName,
})
}, pass))
}
mu.Lock()
@ -63,7 +63,7 @@ func NewGocyclo() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -17,10 +17,10 @@ const godoxName = "godox"
func NewGodox() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: godoxName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -39,16 +39,16 @@ func NewGodox() *goanalysis.Linter {
return nil, nil
}
res := make([]result.Issue, len(issues))
res := make([]goanalysis.Issue, len(issues))
for k, i := range issues {
res[k] = result.Issue{
res[k] = goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: token.Position{
Filename: i.Pos.Filename,
Line: i.Pos.Line,
},
Text: strings.TrimRight(i.Message, "\n"),
FromLinter: godoxName,
}
}, pass)
}
mu.Lock()
@ -57,7 +57,7 @@ func NewGodox() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -9,17 +9,16 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/result"
)
const gofmtName = "gofmt"
func NewGofmt() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: gofmtName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -36,7 +35,7 @@ func NewGofmt() *goanalysis.Linter {
fileNames = append(fileNames, pos.Filename)
}
var issues []result.Issue
var issues []goanalysis.Issue
for _, f := range fileNames {
diff, err := gofmtAPI.Run(f, lintCtx.Settings().Gofmt.Simplify)
@ -52,7 +51,9 @@ func NewGofmt() *goanalysis.Linter {
return nil, errors.Wrapf(err, "can't extract issues from gofmt diff output %q", string(diff))
}
issues = append(issues, is...)
for i := range is {
issues = append(issues, goanalysis.NewIssue(&is[i], pass))
}
}
if len(issues) == 0 {
@ -65,7 +66,7 @@ func NewGofmt() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -10,17 +10,16 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/result"
)
const goimportsName = "goimports"
func NewGoimports() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: goimportsName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -37,7 +36,7 @@ func NewGoimports() *goanalysis.Linter {
fileNames = append(fileNames, pos.Filename)
}
var issues []result.Issue
var issues []goanalysis.Issue
for _, f := range fileNames {
diff, err := goimportsAPI.Run(f)
@ -53,7 +52,9 @@ func NewGoimports() *goanalysis.Linter {
return nil, errors.Wrapf(err, "can't extract issues from gofmt diff output %q", string(diff))
}
issues = append(issues, is...)
for i := range is {
issues = append(issues, goanalysis.NewIssue(&is[i], pass))
}
}
if len(issues) == 0 {
@ -66,7 +67,7 @@ func NewGoimports() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -44,10 +44,10 @@ const golintName = "golint"
func NewGolint() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: golintName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -63,12 +63,14 @@ func NewGolint() *goanalysis.Linter {
}
mu.Lock()
resIssues = append(resIssues, res...)
for i := range res {
resIssues = append(resIssues, goanalysis.NewIssue(&res[i], pass))
}
mu.Unlock()
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -22,14 +22,14 @@ const gosecName = "gosec"
func NewGosec() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
gasConfig := gosec.NewConfig()
enabledRules := rules.Generate()
logger := log.New(ioutil.Discard, "", 0)
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: gosecName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -53,7 +53,7 @@ func NewGosec() *goanalysis.Linter {
return nil, nil
}
res := make([]result.Issue, 0, len(issues))
res := make([]goanalysis.Issue, 0, len(issues))
for _, i := range issues {
text := fmt.Sprintf("%s: %s", i.RuleID, i.What) // TODO: use severity and confidence
var r *result.Range
@ -67,7 +67,7 @@ func NewGosec() *goanalysis.Linter {
line = r.From
}
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: token.Position{
Filename: i.File,
Line: line,
@ -75,7 +75,7 @@ func NewGosec() *goanalysis.Linter {
Text: text,
LineRange: r,
FromLinter: gosecName,
})
}, pass))
}
mu.Lock()
@ -84,7 +84,7 @@ func NewGosec() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -16,10 +16,10 @@ const ineffassignName = "ineffassign"
func NewIneffassign() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: ineffassignName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -40,13 +40,13 @@ func NewIneffassign() *goanalysis.Linter {
return nil, nil
}
res := make([]result.Issue, 0, len(issues))
res := make([]goanalysis.Issue, 0, len(issues))
for _, i := range issues {
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: i.Pos,
Text: fmt.Sprintf("ineffectual assignment to %s", formatCode(i.IdentName, lintCtx.Cfg)),
FromLinter: ineffassignName,
})
}, pass))
}
mu.Lock()
@ -55,7 +55,7 @@ func NewIneffassign() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -16,10 +16,10 @@ const interfacerName = "interfacer"
func NewInterfacer() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: interfacerName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Requires: []*analysis.Analyzer{buildssa.Analyzer},
}
@ -45,14 +45,14 @@ func NewInterfacer() *goanalysis.Linter {
return nil, nil
}
res := make([]result.Issue, 0, len(issues))
res := make([]goanalysis.Issue, 0, len(issues))
for _, i := range issues {
pos := pass.Fset.Position(i.Pos())
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: pos,
Text: i.Message(),
FromLinter: interfacerName,
})
}, pass))
}
mu.Lock()
@ -61,7 +61,7 @@ func NewInterfacer() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -77,10 +77,10 @@ const lllName = "lll"
func NewLLL() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: lllName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -96,14 +96,16 @@ func NewLLL() *goanalysis.Linter {
fileNames = append(fileNames, pos.Filename)
}
var res []result.Issue
var res []goanalysis.Issue
spaces := strings.Repeat(" ", lintCtx.Settings().Lll.TabWidth)
for _, f := range fileNames {
issues, err := getLLLIssuesForFile(f, lintCtx.Settings().Lll.LineLength, spaces)
if err != nil {
return nil, err
}
res = append(res, issues...)
for i := range issues {
res = append(res, goanalysis.NewIssue(&issues[i], pass))
}
}
if len(res) == 0 {
@ -116,7 +118,7 @@ func NewLLL() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -15,9 +15,9 @@ import (
func NewMaligned() *goanalysis.Linter {
const linterName = "maligned"
var mu sync.Mutex
var res []result.Issue
var res []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -34,17 +34,17 @@ func NewMaligned() *goanalysis.Linter {
return nil, nil
}
issues := make([]result.Issue, 0, len(malignedIssues))
issues := make([]goanalysis.Issue, 0, len(malignedIssues))
for _, i := range malignedIssues {
text := fmt.Sprintf("struct of size %d bytes could be of size %d bytes", i.OldSize, i.NewSize)
if lintCtx.Settings().Maligned.SuggestNewOrder {
text += fmt.Sprintf(":\n%s", formatCodeBlock(i.NewStructDef, lintCtx.Cfg))
}
issues = append(issues, result.Issue{
issues = append(issues, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: i.Pos,
Text: text,
FromLinter: linterName,
})
}, pass))
}
mu.Lock()
@ -52,7 +52,7 @@ func NewMaligned() *goanalysis.Linter {
mu.Unlock()
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return res
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -56,11 +56,11 @@ const misspellName = "misspell"
func NewMisspell() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
var ruleErr error
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: misspellName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -106,13 +106,15 @@ func NewMisspell() *goanalysis.Linter {
fileNames = append(fileNames, pos.Filename)
}
var res []result.Issue
var res []goanalysis.Issue
for _, f := range fileNames {
issues, err := runMisspellOnFile(f, &r, lintCtx)
if err != nil {
return nil, err
}
res = append(res, issues...)
for i := range issues {
res = append(res, goanalysis.NewIssue(&issues[i], pass))
}
}
if len(res) == 0 {
return nil, nil
@ -124,7 +126,7 @@ func NewMisspell() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -82,10 +82,10 @@ const nakedretName = "nakedret"
func NewNakedret() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: nakedretName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -95,14 +95,16 @@ func NewNakedret() *goanalysis.Linter {
nil,
).WithContextSetter(func(lintCtx *linter.Context) {
analyzer.Run = func(pass *analysis.Pass) (interface{}, error) {
var res []result.Issue
var res []goanalysis.Issue
for _, file := range pass.Files {
v := nakedretVisitor{
maxLength: lintCtx.Settings().Nakedret.MaxFuncLines,
f: pass.Fset,
}
ast.Walk(&v, file)
res = append(res, v.issues...)
for i := range v.issues {
res = append(res, goanalysis.NewIssue(&v.issues[i], pass))
}
}
if len(res) == 0 {
@ -115,7 +117,7 @@ func NewNakedret() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -16,10 +16,10 @@ const preallocName = "prealloc"
func NewPrealloc() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: preallocName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -31,14 +31,14 @@ func NewPrealloc() *goanalysis.Linter {
s := &lintCtx.Settings().Prealloc
analyzer.Run = func(pass *analysis.Pass) (interface{}, error) {
var res []result.Issue
var res []goanalysis.Issue
hints := prealloc.Check(pass.Files, s.Simple, s.RangeLoops, s.ForLoops)
for _, hint := range hints {
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: pass.Fset.Position(hint.Pos),
Text: fmt.Sprintf("Consider preallocating %s", formatCode(hint.DeclaredSliceName, lintCtx.Cfg)),
FromLinter: preallocName,
})
}, pass))
}
if len(res) == 0 {
@ -51,7 +51,7 @@ func NewPrealloc() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -17,10 +17,10 @@ const scopelintName = "scopelint"
func NewScopelint() *goanalysis.Linter {
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: scopelintName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -47,12 +47,14 @@ func NewScopelint() *goanalysis.Linter {
}
mu.Lock()
resIssues = append(resIssues, res...)
for i := range res {
resIssues = append(resIssues, goanalysis.NewIssue(&res[i], pass))
}
mu.Unlock()
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -15,9 +15,9 @@ import (
func NewStructcheck() *goanalysis.Linter {
const linterName = "structcheck"
var mu sync.Mutex
var res []result.Issue
var res []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -35,13 +35,13 @@ func NewStructcheck() *goanalysis.Linter {
return nil, nil
}
issues := make([]result.Issue, 0, len(structcheckIssues))
issues := make([]goanalysis.Issue, 0, len(structcheckIssues))
for _, i := range structcheckIssues {
issues = append(issues, result.Issue{
issues = append(issues, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: i.Pos,
Text: fmt.Sprintf("%s is unused", formatCode(i.FieldName, lintCtx.Cfg)),
FromLinter: linterName,
})
}, pass))
}
mu.Lock()
@ -49,7 +49,7 @@ func NewStructcheck() *goanalysis.Linter {
mu.Unlock()
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return res
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -9,7 +9,7 @@ import (
func NewTypecheck() *goanalysis.Linter {
const linterName = "typecheck"
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (interface{}, error) {
return nil, nil

View file

@ -14,9 +14,9 @@ import (
func NewUnconvert() *goanalysis.Linter {
const linterName = "unconvert"
var mu sync.Mutex
var res []result.Issue
var res []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -33,13 +33,13 @@ func NewUnconvert() *goanalysis.Linter {
return nil, nil
}
issues := make([]result.Issue, 0, len(positions))
issues := make([]goanalysis.Issue, 0, len(positions))
for _, pos := range positions {
issues = append(issues, result.Issue{
issues = append(issues, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: pos,
Text: "unnecessary conversion",
FromLinter: linterName,
})
}, pass))
}
mu.Lock()
@ -47,7 +47,7 @@ func NewUnconvert() *goanalysis.Linter {
mu.Unlock()
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return res
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -17,10 +17,10 @@ import (
func NewUnparam() *goanalysis.Linter {
const linterName = "unparam"
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Requires: []*analysis.Analyzer{buildssa.Analyzer},
}
@ -56,13 +56,13 @@ func NewUnparam() *goanalysis.Linter {
return nil, err
}
var res []result.Issue
var res []goanalysis.Issue
for _, i := range unparamIssues {
res = append(res, result.Issue{
res = append(res, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: pass.Fset.Position(i.Pos()),
Text: i.Message(),
FromLinter: linterName,
})
}, pass))
}
mu.Lock()
@ -71,7 +71,7 @@ func NewUnparam() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -1,7 +1,10 @@
package golinters
import (
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
"honnef.co/go/tools/unused"
"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
@ -20,15 +23,22 @@ func NewUnused() *goanalysis.Linter {
"Checks Go code for unused constants, variables, functions and types",
analyzers,
nil,
).WithIssuesReporter(func(lintCtx *linter.Context) []result.Issue {
var issues []result.Issue
).WithIssuesReporter(func(lintCtx *linter.Context) []goanalysis.Issue {
typesToPkg := map[*types.Package]*packages.Package{}
for _, pkg := range lintCtx.OriginalPackages {
typesToPkg[pkg.Types] = pkg
}
var issues []goanalysis.Issue
for _, ur := range u.Result() {
p := u.ProblemObject(lintCtx.Packages[0].Fset, ur)
issues = append(issues, result.Issue{
pkg := typesToPkg[ur.Pkg()]
issues = append(issues, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
FromLinter: name,
Text: p.Message,
Pos: p.Pos,
})
Pkg: pkg,
}, nil))
}
return issues
}).WithContextSetter(func(lintCtx *linter.Context) {

View file

@ -15,9 +15,9 @@ import (
func NewVarcheck() *goanalysis.Linter {
const linterName = "varcheck"
var mu sync.Mutex
var res []result.Issue
var res []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -35,13 +35,13 @@ func NewVarcheck() *goanalysis.Linter {
return nil, nil
}
issues := make([]result.Issue, 0, len(varcheckIssues))
issues := make([]goanalysis.Issue, 0, len(varcheckIssues))
for _, i := range varcheckIssues {
issues = append(issues, result.Issue{
issues = append(issues, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
Pos: i.Pos,
Text: fmt.Sprintf("%s is unused", formatCode(i.VarName, lintCtx.Cfg)),
FromLinter: linterName,
})
}, pass))
}
mu.Lock()
@ -49,7 +49,7 @@ func NewVarcheck() *goanalysis.Linter {
mu.Unlock()
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return res
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}

View file

@ -16,10 +16,10 @@ import (
func NewWhitespace() *goanalysis.Linter {
const linterName = "whitespace"
var mu sync.Mutex
var resIssues []result.Issue
var resIssues []goanalysis.Issue
analyzer := &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
}
return goanalysis.NewLinter(
@ -41,7 +41,7 @@ func NewWhitespace() *goanalysis.Linter {
return nil, nil
}
res := make([]result.Issue, len(issues))
res := make([]goanalysis.Issue, len(issues))
for k, i := range issues {
issue := result.Issue{
Pos: token.Position{
@ -70,7 +70,7 @@ func NewWhitespace() *goanalysis.Linter {
}
issue.Replacement.NewLines = []string{bracketLine}
res[k] = issue
res[k] = goanalysis.NewIssue(&issue, pass) //nolint:scopelint
}
mu.Lock()
@ -79,7 +79,7 @@ func NewWhitespace() *goanalysis.Linter {
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -18,7 +18,7 @@ const (
// NewWSL returns a new WSL linter.
func NewWSL() *goanalysis.Linter {
var (
issues []result.Issue
issues []goanalysis.Issue
mu = sync.Mutex{}
analyzer = &analysis.Analyzer{
Name: goanalysis.TheOnlyAnalyzerName,
@ -60,16 +60,16 @@ func NewWSL() *goanalysis.Linter {
defer mu.Unlock()
for _, err := range wslErrors {
issues = append(issues, result.Issue{
issues = append(issues, goanalysis.NewIssue(&result.Issue{ //nolint:scopelint
FromLinter: name,
Pos: err.Position,
Text: err.Reason,
})
}, pass))
}
return nil, nil
}
}).WithIssuesReporter(func(*linter.Context) []result.Issue {
}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
return issues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}

View file

@ -112,13 +112,16 @@ func (r *Runner) runLinterSafe(ctx context.Context, lintCtx *linter.Context,
specificLintCtx := *lintCtx
specificLintCtx.Log = r.Log.Child(lc.Name())
issues, err := lc.Linter.Run(ctx, &specificLintCtx)
if err != nil {
return nil, err
}
for _, i := range issues {
i.FromLinter = lc.Name()
for i := range issues {
if issues[i].FromLinter == "" {
issues[i].FromLinter = lc.Name()
}
}
return issues, nil

View file

@ -14,18 +14,18 @@ type Log interface {
type LogLevel int
const (
// debug message, write to debug logs only by logutils.Debug
// Debug messages, write to debug logs only by logutils.Debug.
LogLevelDebug LogLevel = 0
// information messages, don't write too much messages,
// only useful ones: they are shown when running with -v
// Information messages, don't write too much messages,
// only useful ones: they are shown when running with -v.
LogLevelInfo LogLevel = 1
// hidden errors: non critical errors: work can be continued, no need to fail whole program;
// Hidden errors: non critical errors: work can be continued, no need to fail whole program;
// tests will crash if any warning occurred.
LogLevelWarn LogLevel = 2
// only not hidden from user errors: whole program failing, usually
// Only not hidden from user errors: whole program failing, usually
// error logging happens in 1-2 places: in the "main" function.
LogLevelError LogLevel = 3
)

View file

@ -43,7 +43,8 @@ func (Checkstyle) Print(ctx context.Context, issues []result.Issue) error {
files := map[string]*checkstyleFile{}
for _, issue := range issues {
for i := range issues {
issue := &issues[i]
file, ok := files[issue.FilePath()]
if !ok {
file = &checkstyleFile{

View file

@ -32,7 +32,8 @@ func NewCodeClimate() *CodeClimate {
func (p CodeClimate) Print(ctx context.Context, issues []result.Issue) error {
allIssues := []CodeClimateIssue{}
for _, i := range issues {
for ind := range issues {
i := &issues[ind]
var issue CodeClimateIssue
issue.Description = i.FromLinter + ": " + i.Text
issue.Location.Path = i.Pos.Filename

View file

@ -41,7 +41,8 @@ func NewJunitXML() *JunitXML {
func (JunitXML) Print(ctx context.Context, issues []result.Issue) error {
suites := make(map[string]testSuiteXML) // use a map to group by file
for _, i := range issues {
for ind := range issues {
i := &issues[ind]
suiteName := i.FilePath()
testSuite := suites[suiteName]
testSuite.Suite = i.FilePath()

View file

@ -32,9 +32,8 @@ func (p Tab) SprintfColored(ca color.Attribute, format string, args ...interface
func (p *Tab) Print(ctx context.Context, issues []result.Issue) error {
w := tabwriter.NewWriter(logutils.StdOut, 0, 0, 2, ' ', 0)
for _, i := range issues {
i := i
p.printIssue(&i, w)
for i := range issues {
p.printIssue(&issues[i], w)
}
if err := w.Flush(); err != nil {

View file

@ -37,16 +37,15 @@ func (p Text) SprintfColored(ca color.Attribute, format string, args ...interfac
}
func (p *Text) Print(ctx context.Context, issues []result.Issue) error {
for _, i := range issues {
i := i
p.printIssue(&i)
for i := range issues {
p.printIssue(&issues[i])
if !p.printIssuedLine {
continue
}
p.printSourceCode(&i)
p.printUnderLinePointer(&i)
p.printSourceCode(&issues[i])
p.printUnderLinePointer(&issues[i])
}
return nil

View file

@ -2,6 +2,8 @@ package result
import (
"go/token"
"golang.org/x/tools/go/packages"
)
type Range struct {
@ -23,18 +25,22 @@ type InlineFix struct {
type Issue struct {
FromLinter string
Text string
Pos token.Position
LineRange *Range `json:",omitempty"`
// HunkPos is used only when golangci-lint is run over a diff
HunkPos int `json:",omitempty"`
// Source lines of a code with the issue to show
SourceLines []string
// If we know how to fix the issue we can provide replacement lines
Replacement *Replacement
// Pkg is needed for proper caching of linting results
Pkg *packages.Package `json:"-"`
LineRange *Range `json:",omitempty"`
Pos token.Position
// HunkPos is used only when golangci-lint is run over a diff
HunkPos int `json:",omitempty"`
}
func (i *Issue) FilePath() string {

View file

@ -44,13 +44,14 @@ func (f Fixer) Process(issues []result.Issue) []result.Issue {
outIssues := make([]result.Issue, 0, len(issues))
issuesToFixPerFile := map[string][]result.Issue{}
for _, issue := range issues {
for i := range issues {
issue := &issues[i]
if issue.Replacement == nil {
outIssues = append(outIssues, issue)
outIssues = append(outIssues, *issue)
continue
}
issuesToFixPerFile[issue.FilePath()] = append(issuesToFixPerFile[issue.FilePath()], issue)
issuesToFixPerFile[issue.FilePath()] = append(issuesToFixPerFile[issue.FilePath()], *issue)
}
for file, issuesToFix := range issuesToFixPerFile {
@ -87,8 +88,9 @@ func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error {
// merge multiple issues per line into one issue
issuesPerLine := map[int][]result.Issue{}
for _, i := range issues {
issuesPerLine[i.Line()] = append(issuesPerLine[i.Line()], i)
for i := range issues {
issue := &issues[i]
issuesPerLine[issue.Line()] = append(issuesPerLine[issue.Line()], *issue)
}
issues = issues[:0] // reuse the same memory
@ -123,7 +125,8 @@ func (f Fixer) mergeLineIssues(lineNum int, lineIssues []result.Issue, origFileL
}
// check issues first
for _, i := range lineIssues {
for ind := range lineIssues {
i := &lineIssues[ind]
if i.LineRange != nil {
f.log.Infof("Line %d has multiple issues but at least one of them is ranged: %#v", lineNum, lineIssues)
return &lineIssues[0]
@ -156,8 +159,8 @@ func (f Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, line
// example: origLine="it's becouse of them", StartCol=5, Length=7, NewString="because"
curOrigLinePos := 0
for _, i := range lineIssues {
fix := i.Replacement.Inline
for i := range lineIssues {
fix := lineIssues[i].Replacement.Inline
if fix.StartCol < curOrigLinePos {
f.log.Warnf("Line %d has multiple intersecting issues: %#v", lineNum, lineIssues)
return nil
@ -188,14 +191,15 @@ func (f Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue {
var ret []result.Issue
var currentEnd int
for _, issue := range issues {
for i := range issues {
issue := &issues[i]
rng := issue.GetLineRange()
if rng.From <= currentEnd {
f.log.Infof("Skip issue %#v: intersects with end %d", issue, currentEnd)
continue // skip intersecting issue
}
f.log.Infof("Fix issue %#v with range %v", issue, issue.GetLineRange())
ret = append(ret, issue)
ret = append(ret, *issue)
currentEnd = rng.To
}

View file

@ -1,17 +1,16 @@
package processors
import (
"fmt"
"github.com/pkg/errors"
"github.com/golangci/golangci-lint/pkg/result"
)
func filterIssues(issues []result.Issue, filter func(i *result.Issue) bool) []result.Issue {
retIssues := make([]result.Issue, 0, len(issues))
for _, i := range issues {
i := i
if filter(&i) {
retIssues = append(retIssues, i)
for i := range issues {
if filter(&issues[i]) {
retIssues = append(retIssues, issues[i])
}
}
@ -20,15 +19,14 @@ func filterIssues(issues []result.Issue, filter func(i *result.Issue) bool) []re
func filterIssuesErr(issues []result.Issue, filter func(i *result.Issue) (bool, error)) ([]result.Issue, error) {
retIssues := make([]result.Issue, 0, len(issues))
for _, i := range issues {
i := i
ok, err := filter(&i)
for i := range issues {
ok, err := filter(&issues[i])
if err != nil {
return nil, fmt.Errorf("can't filter issue %#v: %s", i, err)
return nil, errors.Wrapf(err, "can't filter issue %#v", issues[i])
}
if ok {
retIssues = append(retIssues, i)
retIssues = append(retIssues, issues[i])
}
}
@ -37,9 +35,8 @@ func filterIssuesErr(issues []result.Issue, filter func(i *result.Issue) (bool,
func transformIssues(issues []result.Issue, transform func(i *result.Issue) *result.Issue) []result.Issue {
retIssues := make([]result.Issue, 0, len(issues))
for _, i := range issues {
i := i
newI := transform(&i)
for i := range issues {
newI := transform(&issues[i])
if newI != nil {
retIssues = append(retIssues, *newI)
}

View file

@ -7,6 +7,6 @@ import (
)
func BodycloseNotClosed() {
resp, _ := http.Get("https://google.com") // ERROR "bodyclose: response body must be closed"
resp, _ := http.Get("https://google.com") // ERROR "response body must be closed"
_, _ = ioutil.ReadAll(resp.Body)
}