scheduler_perf: work around incorrect gotestsum failure reports
Because Go does not emit a "pass" action for
benchmarks (golang/go#66825 (comment)),
gotestsum reports a successful benchmark run as failed
(gotestyourself/gotestsum#413 (comment)).

We can work around that in each benchmark and sub-benchmark by emitting the
output line that `go test` expects on stdout from the test binary when it succeeds.
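
A minimal standalone sketch of the idea (the helper and benchmark names here are
illustrative; the helper actually added by this commit is fixJSONOutput in the
diff below):

package example

import (
	"fmt"
	"os"
	"slices"
	"testing"
	"time"
)

// markBenchmarkPassed prints the "--- PASS:" line for a (sub-)benchmark when
// the binary runs under -test.v=test2json and the benchmark did not fail.
func markBenchmarkPassed(b *testing.B) {
	if !slices.Contains(os.Args, "-test.v=test2json") {
		return // plain-text output, go test already handles this correctly
	}
	start := time.Now()
	b.Cleanup(func() {
		if b.Failed() {
			return
		}
		// 0x16 (SYN) marks framing lines under -test.v=test2json.
		fmt.Fprintf(os.Stderr, "%c--- PASS: %s (%.2fs)\n", 22, b.Name(), time.Since(start).Seconds())
	})
}

func BenchmarkExample(b *testing.B) {
	markBenchmarkPassed(b)
	b.Run("sub", func(b *testing.B) {
		markBenchmarkPassed(b)
		for i := 0; i < b.N; i++ {
			_ = i * i // stand-in for the workload under test
		}
	})
}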
pohly committed Nov 18, 2024
1 parent 369a18a commit ac3d43a
Showing 1 changed file with 30 additions and 0 deletions.
test/integration/scheduler_perf/scheduler_perf.go
@@ -29,6 +29,7 @@ import (
"os"
"path"
"regexp"
"slices"
"strings"
"sync"
"testing"
@@ -1115,6 +1116,32 @@ func featureGatesMerge(src map[featuregate.Feature]bool, overrides map[featurega
	return result
}

// fixJSONOutput works around Go not emitting a "pass" action for
// sub-benchmarks
// (https://github.com/golang/go/issues/66825#issuecomment-2343229005), which
// causes gotestsum to report a successful benchmark run as failed
// (https://github.com/gotestyourself/gotestsum/issues/413#issuecomment-2343206787).
//
// It does this by printing the missing "PASS" output line that test2json
// then converts into the "pass" action.
func fixJSONOutput(b *testing.B) {
	if !slices.Contains(os.Args, "-test.v=test2json") {
		// Not printing JSON.
		return
	}

	start := time.Now()
	b.Cleanup(func() {
		if b.Failed() {
			// Really has failed, do nothing.
			return
		}
		// SYN gets injected when using -test.v=test2json, see
		// https://cs.opensource.google/go/go/+/refs/tags/go1.23.3:src/testing/testing.go;drc=87ec2c959c73e62bfae230ef7efca11ec2a90804;l=527
		fmt.Fprintf(os.Stderr, "%c--- PASS: %s (%.2fs)\n", 22 /* SYN */, b.Name(), time.Since(start).Seconds())
	})
}

// RunBenchmarkPerfScheduling runs the scheduler performance benchmark tests.
//
// You can pass your own scheduler plugins via outOfTreePluginRegistry.
@@ -1128,6 +1155,7 @@ func RunBenchmarkPerfScheduling(b *testing.B, configFile string, topicName strin
	if err = validateTestCases(testCases); err != nil {
		b.Fatal(err)
	}
	fixJSONOutput(b)

	if testing.Short() {
		PerfSchedulingLabelFilter += ",+short"
@@ -1147,11 +1175,13 @@ func RunBenchmarkPerfScheduling(b *testing.B, configFile string, topicName strin
	dataItems := DataItems{Version: "v1"}
	for _, tc := range testCases {
		b.Run(tc.Name, func(b *testing.B) {
			fixJSONOutput(b)
			for _, w := range tc.Workloads {
				b.Run(w.Name, func(b *testing.B) {
					if !enabled(testcaseLabelSelectors, append(tc.Labels, w.Labels...)...) {
						b.Skipf("disabled by label filter %q", PerfSchedulingLabelFilter)
					}
					fixJSONOutput(b)

					featureGates := featureGatesMerge(tc.FeatureGates, w.FeatureGates)
					informerFactory, tCtx := setupTestCase(b, tc, featureGates, output, outOfTreePluginRegistry)
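For context, roughly what the emitted line achieves (illustrative, not output
captured from this commit): under -test.v=test2json, test2json strips the SYN
byte and converts the "--- PASS:" line into a "pass" event for that
(sub-)benchmark, which gotestsum then counts as a pass instead of an
undetermined result. With a hypothetical benchmark name, the emitted line and
the resulting event look roughly like this (other test2json fields such as
Time and Package omitted):

--- PASS: BenchmarkPerfScheduling/SchedulingBasic/5000Nodes (12.34s)
{"Action":"pass","Test":"BenchmarkPerfScheduling/SchedulingBasic/5000Nodes","Elapsed":12.34}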
