
Commit c4638a9

craig[bot], michae2, and yuzefovich committed
145984: logictest: deflake recent addition to select_for_update r=yuzefovich a=michae2

The testcases I added in #144449 depend on a large enough batch size to match the expected tracing.

Fixes: #144830

Release note: None

146155: explain: deflake TestMaximumMemoryUsage for good r=yuzefovich a=yuzefovich

This commit reverts adeddb5, which attempted to deflake `TestMaximumMemoryUsage` (the test could fail under metamorphic randomization) by adding a retry loop. Instead, this commit simply forces production values for the relevant batch sizes (I think `kv-batch-size` is the one to blame), because the retry-loop approach could still flake.

Fixes: #146052.

Release note: None

Co-authored-by: Michael Erickson <[email protected]>
Co-authored-by: Yahor Yuzefovich <[email protected]>
3 parents: 86041a1 + a8da81f + b83a627
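For orientation before the diffs, here is a condensed, self-contained sketch of the second fix. The knob, types, and packages are exactly the ones the commit uses; the test name is hypothetical.

```go
package explain_test

import (
	"context"
	"testing"

	"github.com/cockroachdb/cockroach/pkg/base"
	"github.com/cockroachdb/cockroach/pkg/sql/sem/eval"
	"github.com/cockroachdb/cockroach/pkg/testutils/testcluster"
)

// TestWithProductionBatchSizes is a hypothetical name; TestMaximumMemoryUsage
// in the diff below applies the same setup.
func TestWithProductionBatchSizes(t *testing.T) {
	tc := testcluster.StartTestCluster(t, 3 /* numNodes */, base.TestClusterArgs{
		ServerArgs: base.TestServerArgs{
			Knobs: base.TestingKnobs{
				SQLEvalContext: &eval.TestingKnobs{
					// Run with production values instead of metamorphic
					// randomization (which can pick e.g. kv-batch-size=1).
					ForceProductionValues: true,
				},
			},
		},
	})
	defer tc.Stopper().Stop(context.Background())
	// ... run queries whose tracing depends on production batch sizes ...
}
```

With `ForceProductionValues` set, constants that are normally metamorphically randomized in tests (such as `kv-batch-size`) keep their production defaults for the whole cluster.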

File tree

3 files changed (+31, −28 lines)


pkg/sql/logictest/testdata/logic_test/select_for_update

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,4 +1,4 @@
-# LogicTest: !weak-iso-level-configs
+# LogicTest: !weak-iso-level-configs !metamorphic-batch-sizes
 # This test assumes that SERIALIZABLE isolation is being used. See
 # select_for_update_read_committed for READ COMMITTED testing.
 
```
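For context on this one-line change: the `# LogicTest:` directive on a logic test file's first line selects the configurations the file runs under, and a `!` prefix excludes a configuration or config set (a summary of the framework's convention, not something stated in this commit). The new header keeps the file out of any config that randomizes batch sizes:

```
# LogicTest: !weak-iso-level-configs !metamorphic-batch-sizes
```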

pkg/sql/opt/exec/explain/BUILD.bazel

Lines changed: 0 additions & 1 deletion
```diff
@@ -89,7 +89,6 @@ go_test(
         "//pkg/util/log",
         "//pkg/util/treeprinter",
         "@com_github_cockroachdb_datadriven//:datadriven",
-        "@com_github_cockroachdb_errors//:errors",
         "@com_github_stretchr_testify//assert",
         "@com_github_stretchr_testify//require",
         "@in_gopkg_yaml_v2//:yaml_v2",
```

pkg/sql/opt/exec/explain/output_test.go

Lines changed: 30 additions & 26 deletions
```diff
@@ -18,6 +18,7 @@ import (
     "github.com/cockroachdb/cockroach/pkg/multitenant/tenantcapabilitiespb"
     "github.com/cockroachdb/cockroach/pkg/sql/catalog/colinfo"
     "github.com/cockroachdb/cockroach/pkg/sql/opt/exec/explain"
+    "github.com/cockroachdb/cockroach/pkg/sql/sem/eval"
     "github.com/cockroachdb/cockroach/pkg/sql/types"
     "github.com/cockroachdb/cockroach/pkg/testutils"
     "github.com/cockroachdb/cockroach/pkg/testutils/datapathutils"
@@ -30,7 +31,6 @@ import (
     "github.com/cockroachdb/cockroach/pkg/util/leaktest"
     "github.com/cockroachdb/cockroach/pkg/util/log"
     "github.com/cockroachdb/datadriven"
-    "github.com/cockroachdb/errors"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
     yaml "gopkg.in/yaml.v2"
```
```diff
@@ -395,7 +395,21 @@ func TestMaximumMemoryUsage(t *testing.T) {
     skip.UnderRace(t, "multinode cluster setup times out under race")
 
     const numNodes = 3
-    tc := testcluster.StartTestCluster(t, numNodes, base.TestClusterArgs{})
+    tc := testcluster.StartTestCluster(t, numNodes, base.TestClusterArgs{
+        ServerArgs: base.TestServerArgs{
+            Knobs: base.TestingKnobs{
+                SQLEvalContext: &eval.TestingKnobs{
+                    // We disable the randomization of the batch sizes so that
+                    // a small number of gRPC calls is issued in the query
+                    // below (with kv-batch-size=1 we would issue 10k of them,
+                    // which might result in dropping the ComponentStats proto
+                    // that powers "maximum memory usage" from the trace,
+                    // flaking the test).
+                    ForceProductionValues: true,
+                },
+            },
+        },
+    })
     ctx := context.Background()
     defer tc.Stopper().Stop(ctx)
 
```
```diff
@@ -418,29 +432,19 @@ func TestMaximumMemoryUsage(t *testing.T) {
         return err
     })
 
-    // In rare cases (due to metamorphic randomization) we might drop the
-    // ComponentStats proto that powers "maximum memory usage" stat from the
-    // trace, so we add a retry loop around this.
-    testutils.SucceedsSoon(t, func() error {
-        rows := db.QueryStr(t, "EXPLAIN ANALYZE SELECT max(v) FROM t GROUP BY bucket;")
-        var output strings.Builder
-        maxMemoryRE := regexp.MustCompile(`maximum memory usage: ([\d\.]+) MiB`)
-        var maxMemoryUsage float64
-        for _, row := range rows {
-            output.WriteString(row[0])
-            output.WriteString("\n")
-            s := strings.TrimSpace(row[0])
-            if matches := maxMemoryRE.FindStringSubmatch(s); len(matches) > 0 {
-                var err error
-                maxMemoryUsage, err = strconv.ParseFloat(matches[1], 64)
-                if err != nil {
-                    return err
-                }
-            }
-        }
-        if maxMemoryUsage < 5.0 {
-            return errors.Newf("expected maximum memory usage to be at least 5 MiB, full output:\n\n%s", output.String())
+    rows := db.QueryStr(t, "EXPLAIN ANALYZE SELECT max(v) FROM t GROUP BY bucket;")
+    var output strings.Builder
+    maxMemoryRE := regexp.MustCompile(`maximum memory usage: ([\d\.]+) MiB`)
+    var maxMemoryUsage float64
+    for _, row := range rows {
+        output.WriteString(row[0])
+        output.WriteString("\n")
+        s := strings.TrimSpace(row[0])
+        if matches := maxMemoryRE.FindStringSubmatch(s); len(matches) > 0 {
+            var err error
+            maxMemoryUsage, err = strconv.ParseFloat(matches[1], 64)
+            require.NoError(t, err)
         }
-        return nil
-    })
+    }
+    require.Greaterf(t, maxMemoryUsage, 5.0, "expected maximum memory usage to be at least 5 MiB, full output:\n\n%s", output.String())
 }
```
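As a supplement, here is a minimal standalone sketch of the stat-extraction logic the rewritten test relies on. The sample output lines are hypothetical; the regexp and the `strconv.ParseFloat` call mirror the test above.

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"
	"strings"
)

func main() {
	// Hypothetical EXPLAIN ANALYZE output; the test reads real rows via
	// db.QueryStr(t, "EXPLAIN ANALYZE ...").
	rows := []string{
		"  planning time: 1ms",
		"  maximum memory usage: 6.3 MiB",
		"  network usage: 5.2 KiB",
	}
	// The same pattern the test uses to find the stat in the trace output.
	maxMemoryRE := regexp.MustCompile(`maximum memory usage: ([\d\.]+) MiB`)
	var maxMemoryUsage float64
	for _, row := range rows {
		s := strings.TrimSpace(row)
		if matches := maxMemoryRE.FindStringSubmatch(s); len(matches) > 0 {
			v, err := strconv.ParseFloat(matches[1], 64)
			if err != nil {
				panic(err) // the test uses require.NoError(t, err)
			}
			maxMemoryUsage = v
		}
	}
	// The test asserts maxMemoryUsage > 5.0 via require.Greaterf.
	fmt.Printf("maximum memory usage: %.1f MiB\n", maxMemoryUsage)
}
```

Where the old version retried the whole query under `testutils.SucceedsSoon`, the new version runs it once and asserts directly with `require.Greaterf`, since forcing production values removes the source of the flake.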
