go/types: turn TestBenchmark into a normal benchmark
TestBenchmark doesn't use the -bench flag, so that it can format custom
output -- the number of checked lines per second. This is a barrier both
to discoverability and to piping benchmark output into analysis tools
such as benchstat.

Using testing.B.ReportMetric and a bit of manual timing, we can achieve
similar results while conforming to normal benchmark output. Do this,
and rename the test func to BenchmarkCheck (for symmetry with
TestCheck).

Change-Id: Ie8f2259c1ca9e6986f0137287acf8eb2843f96b8
Reviewed-on: https://go-review.googlesource.com/c/go/+/257958
Run-TryBot: Robert Findley <[email protected]>
TryBot-Result: Go Bot <[email protected]>
Trust: Robert Findley <[email protected]>
Reviewed-by: Robert Griesemer <[email protected]>
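
To illustrate the approach described above, here is a minimal, self-contained sketch of the pattern (hypothetical file, benchmark, and helper names; the actual change is in the diff below): time the measured region by hand, then report a derived rate with testing.B.ReportMetric so it appears alongside ns/op in standard -bench output.

// lines_bench_test.go: a hedged sketch, not the code from this CL.
package example

import (
	"testing"
	"time"
)

// BenchmarkLinesPerSecond pairs manual timing with ReportMetric so that a
// custom "lines/s" metric rides along with the usual benchmark columns.
func BenchmarkLinesPerSecond(b *testing.B) {
	const lines = 1000 // stand-in for the amount of work per iteration

	b.ResetTimer()
	start := time.Now()
	for i := 0; i < b.N; i++ {
		processLines(lines) // stand-in for the code under measurement
	}
	b.StopTimer()
	// Normalize by wall time over all b.N iterations.
	b.ReportMetric(float64(lines)*float64(b.N)/time.Since(start).Seconds(), "lines/s")
}

func processLines(n int) {
	for i := 0; i < n; i++ {
		_ = i * i // trivial placeholder work
	}
}

Run it like any other benchmark, e.g. go test -run='^$' -bench=LinesPerSecond, and the resulting standard-format output can be compared across runs with benchstat.
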
findleyr committed Oct 1, 2020
1 parent 069aef4 commit 7347907
Showing 1 changed file with 45 additions and 35 deletions.
80 changes: 45 additions & 35 deletions src/go/types/self_test.go
@@ -5,21 +5,18 @@
 package types_test
 
 import (
-	"flag"
-	"fmt"
 	"go/ast"
 	"go/importer"
 	"go/parser"
 	"go/token"
+	"path"
 	"path/filepath"
 	"testing"
 	"time"
 
 	. "go/types"
 )
 
-var benchmark = flag.Bool("b", false, "run benchmarks")
-
 func TestSelf(t *testing.T) {
 	fset := token.NewFileSet()
 	files, err := pkgFiles(fset, ".")
@@ -39,57 +36,70 @@ func TestSelf(t *testing.T) {
 	}
 }
 
-func TestBenchmark(t *testing.T) {
-	if !*benchmark {
-		return
-	}
-
-	// We're not using testing's benchmarking mechanism directly
-	// because we want custom output.
-
+func BenchmarkCheck(b *testing.B) {
 	for _, p := range []string{
 		"net/http",
 		"go/parser",
 		"go/constant",
 		filepath.Join("go", "internal", "gcimporter"),
 	} {
-		path := filepath.Join("..", "..", p)
-		runbench(t, path, false)
-		runbench(t, path, true)
-		fmt.Println()
+		b.Run(path.Base(p), func(b *testing.B) {
+			path := filepath.Join("..", "..", p)
+			for _, ignoreFuncBodies := range []bool{false, true} {
+				name := "funcbodies"
+				if ignoreFuncBodies {
+					name = "nofuncbodies"
+				}
+				b.Run(name, func(b *testing.B) {
+					b.Run("info", func(b *testing.B) {
+						runbench(b, path, ignoreFuncBodies, true)
+					})
+					b.Run("noinfo", func(b *testing.B) {
+						runbench(b, path, ignoreFuncBodies, false)
+					})
+				})
+			}
+		})
 	}
 }
 
-func runbench(t *testing.T, path string, ignoreFuncBodies bool) {
+func runbench(b *testing.B, path string, ignoreFuncBodies, writeInfo bool) {
 	fset := token.NewFileSet()
 	files, err := pkgFiles(fset, path)
 	if err != nil {
-		t.Fatal(err)
+		b.Fatal(err)
 	}
-
-	b := testing.Benchmark(func(b *testing.B) {
-		for i := 0; i < b.N; i++ {
-			conf := Config{
-				IgnoreFuncBodies: ignoreFuncBodies,
-				Importer:         importer.Default(),
-			}
-			if _, err := conf.Check(path, fset, files, nil); err != nil {
-				t.Fatal(err)
-			}
-		}
-	})
-
 	// determine line count
 	lines := 0
 	fset.Iterate(func(f *token.File) bool {
 		lines += f.LineCount()
 		return true
 	})
 
-	d := time.Duration(b.NsPerOp())
-	fmt.Printf("%s (ignoreFuncBodies = %v):\n", filepath.Base(path), ignoreFuncBodies)
-	fmt.Printf("\t%s for %d lines (%.0f lines/s)\n", d, lines, float64(lines)/d.Seconds())
-	fmt.Printf("\t%s\n", b.MemString())
+	b.ResetTimer()
+	start := time.Now()
+	for i := 0; i < b.N; i++ {
+		conf := Config{
+			IgnoreFuncBodies: ignoreFuncBodies,
+			Importer:         importer.Default(),
+		}
+		var info *Info
+		if writeInfo {
+			info = &Info{
+				Types:      make(map[ast.Expr]TypeAndValue),
+				Defs:       make(map[*ast.Ident]Object),
+				Uses:       make(map[*ast.Ident]Object),
+				Implicits:  make(map[ast.Node]Object),
+				Selections: make(map[*ast.SelectorExpr]*Selection),
+				Scopes:     make(map[ast.Node]*Scope),
+			}
+		}
+		if _, err := conf.Check(path, fset, files, info); err != nil {
+			b.Fatal(err)
+		}
+	}
+	b.StopTimer()
+	b.ReportMetric(float64(lines)*float64(b.N)/time.Since(start).Seconds(), "lines/s")
 }
 
 func pkgFiles(fset *token.FileSet, path string) ([]*ast.File, error) {
