Move gocovmerge as vendor (#10947)

* Move gocovmerge as vendor
* Update Makefile

Co-Authored-By: guillep2k <18600385+guillep2k@users.noreply.github.com>
Co-authored-by: guillep2k <18600385+guillep2k@users.noreply.github.com>

Branch: tokarchuk/v1.17
parent 14c97c0501
commit b264c30394
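The Makefile change itself is not shown in this excerpt. Assuming the merger is vendored as a plain Go file somewhere in the tree (for example under build/), the coverage target would typically invoke it with `go run` and feed it the per-package profiles, roughly:

	go run build/gocovmerge.go integration.coverage.out coverage.out > coverage.all

The file path and profile names above are illustrative assumptions; the point of the change is that the tool is built from the repository itself rather than fetched as an external binary.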
@@ -0,0 +1,119 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// Copyright (c) 2015, Wade Simmons
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

// gocovmerge takes the results from multiple `go test -coverprofile` runs and
// merges them into one profile

// +build ignore

package main

import (
	"flag"
	"fmt"
	"io"
	"log"
	"os"
	"sort"

	"golang.org/x/tools/cover"
)

func mergeProfiles(p *cover.Profile, merge *cover.Profile) {
	if p.Mode != merge.Mode {
		log.Fatalf("cannot merge profiles with different modes")
	}
	// Since the blocks are sorted, we can keep track of where the last block
	// was inserted and only look at the blocks after that as targets for merge
	startIndex := 0
	for _, b := range merge.Blocks {
		startIndex = mergeProfileBlock(p, b, startIndex)
	}
}

func mergeProfileBlock(p *cover.Profile, pb cover.ProfileBlock, startIndex int) int {
	sortFunc := func(i int) bool {
		pi := p.Blocks[i+startIndex]
		return pi.StartLine >= pb.StartLine && (pi.StartLine != pb.StartLine || pi.StartCol >= pb.StartCol)
	}

	i := 0
	if sortFunc(i) != true {
		i = sort.Search(len(p.Blocks)-startIndex, sortFunc)
	}
	i += startIndex
	if i < len(p.Blocks) && p.Blocks[i].StartLine == pb.StartLine && p.Blocks[i].StartCol == pb.StartCol {
		if p.Blocks[i].EndLine != pb.EndLine || p.Blocks[i].EndCol != pb.EndCol {
			log.Fatalf("OVERLAP MERGE: %v %v %v", p.FileName, p.Blocks[i], pb)
		}
		switch p.Mode {
		case "set":
			p.Blocks[i].Count |= pb.Count
		case "count", "atomic":
			p.Blocks[i].Count += pb.Count
		default:
			log.Fatalf("unsupported covermode: '%s'", p.Mode)
		}
	} else {
		if i > 0 {
			pa := p.Blocks[i-1]
			if pa.EndLine >= pb.EndLine && (pa.EndLine != pb.EndLine || pa.EndCol > pb.EndCol) {
				log.Fatalf("OVERLAP BEFORE: %v %v %v", p.FileName, pa, pb)
			}
		}
		if i < len(p.Blocks)-1 {
			pa := p.Blocks[i+1]
			if pa.StartLine <= pb.StartLine && (pa.StartLine != pb.StartLine || pa.StartCol < pb.StartCol) {
				log.Fatalf("OVERLAP AFTER: %v %v %v", p.FileName, pa, pb)
			}
		}
		p.Blocks = append(p.Blocks, cover.ProfileBlock{})
		copy(p.Blocks[i+1:], p.Blocks[i:])
		p.Blocks[i] = pb
	}
	return i + 1
}

func addProfile(profiles []*cover.Profile, p *cover.Profile) []*cover.Profile {
	i := sort.Search(len(profiles), func(i int) bool { return profiles[i].FileName >= p.FileName })
	if i < len(profiles) && profiles[i].FileName == p.FileName {
		mergeProfiles(profiles[i], p)
	} else {
		profiles = append(profiles, nil)
		copy(profiles[i+1:], profiles[i:])
		profiles[i] = p
	}
	return profiles
}

func dumpProfiles(profiles []*cover.Profile, out io.Writer) {
	if len(profiles) == 0 {
		return
	}
	fmt.Fprintf(out, "mode: %s\n", profiles[0].Mode)
	for _, p := range profiles {
		for _, b := range p.Blocks {
			fmt.Fprintf(out, "%s:%d.%d,%d.%d %d %d\n", p.FileName, b.StartLine, b.StartCol, b.EndLine, b.EndCol, b.NumStmt, b.Count)
		}
	}
}

func main() {
	flag.Parse()

	var merged []*cover.Profile

	for _, file := range flag.Args() {
		profiles, err := cover.ParseProfiles(file)
		if err != nil {
			log.Fatalf("failed to parse profiles: %v", err)
		}
		for _, p := range profiles {
			merged = addProfile(merged, p)
		}
	}

	dumpProfiles(merged, os.Stdout)
}
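As a sketch of the merge semantics implemented above: blocks covering the same source span are combined, with counts OR-ed in "set" mode and summed in "count" and "atomic" modes. For two hypothetical count-mode profiles

	mode: count
	foo.go:10.2,12.3 2 1

and

	mode: count
	foo.go:10.2,12.3 2 3

the merged output written by dumpProfiles would be

	mode: count
	foo.go:10.2,12.3 2 4

The file name and block coordinates are made up for the example; overlapping but non-identical blocks abort the merge, as the OVERLAP checks show.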
@@ -0,0 +1,256 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package cover provides support for parsing coverage profiles
// generated by "go test -coverprofile=cover.out".
package cover // import "golang.org/x/tools/cover"

import (
	"bufio"
	"errors"
	"fmt"
	"math"
	"os"
	"sort"
	"strconv"
	"strings"
)

// Profile represents the profiling data for a specific file.
type Profile struct {
	FileName string
	Mode     string
	Blocks   []ProfileBlock
}

// ProfileBlock represents a single block of profiling data.
type ProfileBlock struct {
	StartLine, StartCol int
	EndLine, EndCol     int
	NumStmt, Count      int
}

type byFileName []*Profile

func (p byFileName) Len() int           { return len(p) }
func (p byFileName) Less(i, j int) bool { return p[i].FileName < p[j].FileName }
func (p byFileName) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// ParseProfiles parses profile data in the specified file and returns a
// Profile for each source file described therein.
func ParseProfiles(fileName string) ([]*Profile, error) {
	pf, err := os.Open(fileName)
	if err != nil {
		return nil, err
	}
	defer pf.Close()

	files := make(map[string]*Profile)
	buf := bufio.NewReader(pf)
	// First line is "mode: foo", where foo is "set", "count", or "atomic".
	// Rest of file is in the format
	//	encoding/base64/base64.go:34.44,37.40 3 1
	// where the fields are: name.go:line.column,line.column numberOfStatements count
	s := bufio.NewScanner(buf)
	mode := ""
	for s.Scan() {
		line := s.Text()
		if mode == "" {
			const p = "mode: "
			if !strings.HasPrefix(line, p) || line == p {
				return nil, fmt.Errorf("bad mode line: %v", line)
			}
			mode = line[len(p):]
			continue
		}
		fn, b, err := parseLine(line)
		if err != nil {
			return nil, fmt.Errorf("line %q doesn't match expected format: %v", line, err)
		}
		p := files[fn]
		if p == nil {
			p = &Profile{
				FileName: fn,
				Mode:     mode,
			}
			files[fn] = p
		}
		p.Blocks = append(p.Blocks, b)
	}
	if err := s.Err(); err != nil {
		return nil, err
	}
	for _, p := range files {
		sort.Sort(blocksByStart(p.Blocks))
		// Merge samples from the same location.
		j := 1
		for i := 1; i < len(p.Blocks); i++ {
			b := p.Blocks[i]
			last := p.Blocks[j-1]
			if b.StartLine == last.StartLine &&
				b.StartCol == last.StartCol &&
				b.EndLine == last.EndLine &&
				b.EndCol == last.EndCol {
				if b.NumStmt != last.NumStmt {
					return nil, fmt.Errorf("inconsistent NumStmt: changed from %d to %d", last.NumStmt, b.NumStmt)
				}
				if mode == "set" {
					p.Blocks[j-1].Count |= b.Count
				} else {
					p.Blocks[j-1].Count += b.Count
				}
				continue
			}
			p.Blocks[j] = b
			j++
		}
		p.Blocks = p.Blocks[:j]
	}
	// Generate a sorted slice.
	profiles := make([]*Profile, 0, len(files))
	for _, profile := range files {
		profiles = append(profiles, profile)
	}
	sort.Sort(byFileName(profiles))
	return profiles, nil
}

// parseLine parses a line from a coverage file.
// It is equivalent to the regex
//	^(.+):([0-9]+)\.([0-9]+),([0-9]+)\.([0-9]+) ([0-9]+) ([0-9]+)$
//
// However, it is much faster: https://golang.org/cl/179377
func parseLine(l string) (fileName string, block ProfileBlock, err error) {
	end := len(l)

	b := ProfileBlock{}
	b.Count, end, err = seekBack(l, ' ', end, "Count")
	if err != nil {
		return "", b, err
	}
	b.NumStmt, end, err = seekBack(l, ' ', end, "NumStmt")
	if err != nil {
		return "", b, err
	}
	b.EndCol, end, err = seekBack(l, '.', end, "EndCol")
	if err != nil {
		return "", b, err
	}
	b.EndLine, end, err = seekBack(l, ',', end, "EndLine")
	if err != nil {
		return "", b, err
	}
	b.StartCol, end, err = seekBack(l, '.', end, "StartCol")
	if err != nil {
		return "", b, err
	}
	b.StartLine, end, err = seekBack(l, ':', end, "StartLine")
	if err != nil {
		return "", b, err
	}
	fn := l[0:end]
	if fn == "" {
		return "", b, errors.New("a FileName cannot be blank")
	}
	return fn, b, nil
}

// seekBack searches backwards from end to find sep in l, then returns the
// value between sep and end as an integer.
// If seekBack fails, the returned error will reference what.
func seekBack(l string, sep byte, end int, what string) (value int, nextSep int, err error) {
	// Since we're seeking backwards and we know only ASCII is legal for these values,
	// we can ignore the possibility of non-ASCII characters.
	for start := end - 1; start >= 0; start-- {
		if l[start] == sep {
			i, err := strconv.Atoi(l[start+1 : end])
			if err != nil {
				return 0, 0, fmt.Errorf("couldn't parse %q: %v", what, err)
			}
			if i < 0 {
				return 0, 0, fmt.Errorf("negative values are not allowed for %s, found %d", what, i)
			}
			return i, start, nil
		}
	}
	return 0, 0, fmt.Errorf("couldn't find a %s before %s", string(sep), what)
}

type blocksByStart []ProfileBlock

func (b blocksByStart) Len() int      { return len(b) }
func (b blocksByStart) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
func (b blocksByStart) Less(i, j int) bool {
	bi, bj := b[i], b[j]
	return bi.StartLine < bj.StartLine || bi.StartLine == bj.StartLine && bi.StartCol < bj.StartCol
}

// Boundary represents the position in a source file of the beginning or end of a
// block as reported by the coverage profile. In HTML mode, it will correspond to
// the opening or closing of a <span> tag and will be used to colorize the source
type Boundary struct {
	Offset int     // Location as a byte offset in the source file.
	Start  bool    // Is this the start of a block?
	Count  int     // Event count from the cover profile.
	Norm   float64 // Count normalized to [0..1].
}

// Boundaries returns a Profile as a set of Boundary objects within the provided src.
func (p *Profile) Boundaries(src []byte) (boundaries []Boundary) {
	// Find maximum count.
	max := 0
	for _, b := range p.Blocks {
		if b.Count > max {
			max = b.Count
		}
	}
	// Divisor for normalization.
	divisor := math.Log(float64(max))

	// boundary returns a Boundary, populating the Norm field with a normalized Count.
	boundary := func(offset int, start bool, count int) Boundary {
		b := Boundary{Offset: offset, Start: start, Count: count}
		if !start || count == 0 {
			return b
		}
		if max <= 1 {
			b.Norm = 0.8 // Profile is in "set" mode; we want a heat map. Use cov8 in the CSS.
		} else if count > 0 {
			b.Norm = math.Log(float64(count)) / divisor
		}
		return b
	}

	line, col := 1, 2 // TODO: Why is this 2?
	for si, bi := 0, 0; si < len(src) && bi < len(p.Blocks); {
		b := p.Blocks[bi]
		if b.StartLine == line && b.StartCol == col {
			boundaries = append(boundaries, boundary(si, true, b.Count))
		}
		if b.EndLine == line && b.EndCol == col || line > b.EndLine {
			boundaries = append(boundaries, boundary(si, false, 0))
			bi++
			continue // Don't advance through src; maybe the next block starts here.
		}
		if src[si] == '\n' {
			line++
			col = 0
		}
		col++
		si++
	}
	sort.Sort(boundariesByPos(boundaries))
	return
}

type boundariesByPos []Boundary

func (b boundariesByPos) Len() int      { return len(b) }
func (b boundariesByPos) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
func (b boundariesByPos) Less(i, j int) bool {
	if b[i].Offset == b[j].Offset {
		return !b[i].Start && b[j].Start
	}
	return b[i].Offset < b[j].Offset
}
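For reference, a minimal sketch of consuming the vendored cover package directly; the profile file name and the per-file summary are illustrative, and only ParseProfiles plus the Profile/ProfileBlock fields defined above are used:

	package main

	import (
		"fmt"
		"log"

		"golang.org/x/tools/cover"
	)

	func main() {
		// Parse a profile written by `go test -coverprofile=coverage.out`.
		profiles, err := cover.ParseProfiles("coverage.out")
		if err != nil {
			log.Fatalf("failed to parse profile: %v", err)
		}
		for _, p := range profiles {
			covered, total := 0, 0
			for _, b := range p.Blocks {
				total += b.NumStmt
				if b.Count > 0 {
					covered += b.NumStmt
				}
			}
			fmt.Printf("%s: %d/%d statements covered\n", p.FileName, covered, total)
		}
	}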