Skip to content

Commit 20591ca

Browse files
craig[bot] and yuzefovich
committed
147933: importer: remove tests for PGDUMP and MYSQLDUMP r=yuzefovich a=yuzefovich This commit only touches the tests for deprecated IMPORT formats. It removes the tests that are specific to these formats (which includes things like creating stub table statistics for newly-created tables, which other formats can't do). It also skips `TestImportMultiRegion` under race (similar to all other multi-node IMPORT tests) since I've seen some timeouts on it in CI. Epic: None Release note: None 148234: row: remove InitChecksum for each KV inserted by IMPORT r=yuzefovich a=yuzefovich I noticed `InitChecksum` call show up in the CPU profile of a node from 300 node cluster when IMPORT is running, and I was confused by it. I did some archaeology, and I think it hasn't been needed for a long time. For context, prior to 2.0, we effectively required setting the checksum on the Value since evaluation of CPuts compared RawBytes directly, which included the checksum comparison. This was an oversight because the checksum should be optional, and it became such in 33957ae. However, in order to support IMPORT in mixed-version 1.1-2.0 cluster, in 5ae1bb2 we added the population of the checksum for all KVs produced for insertion. There is a discussion on the PR that this is no longer needed as of the optional checksum change mentioned above. There is also a very recent discussion about removing this checksum in non-test code altogether since it probably doesn't give us much since we do checksum verification at the storage / network level. See #145541. It should be safe to not populate the checksum for IMPORT as it seems to be the only place we do so in the SQL land. Epic: None Release note: None Co-authored-by: Yahor Yuzefovich <[email protected]>
3 parents b8c93ad + 8ff85ae + 359e775 commit 20591ca

30 files changed

+68
-3905
lines changed

pkg/ccl/importerccl/BUILD.bazel

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ go_test(
3535
"//pkg/testutils",
3636
"//pkg/testutils/datapathutils",
3737
"//pkg/testutils/serverutils",
38+
"//pkg/testutils/skip",
3839
"//pkg/testutils/sqlutils",
3940
"//pkg/testutils/testcluster",
4041
"//pkg/util/leaktest",

pkg/ccl/importerccl/ccl_test.go

Lines changed: 3 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ import (
3333
"github.com/cockroachdb/cockroach/pkg/testutils"
3434
"github.com/cockroachdb/cockroach/pkg/testutils/datapathutils"
3535
"github.com/cockroachdb/cockroach/pkg/testutils/serverutils"
36+
"github.com/cockroachdb/cockroach/pkg/testutils/skip"
3637
"github.com/cockroachdb/cockroach/pkg/testutils/sqlutils"
3738
"github.com/cockroachdb/cockroach/pkg/testutils/testcluster"
3839
"github.com/cockroachdb/cockroach/pkg/util/leaktest"
@@ -57,6 +58,8 @@ func TestImportMultiRegion(t *testing.T) {
5758
defer leaktest.AfterTest(t)()
5859
defer log.Scope(t).Close(t)
5960

61+
skip.UnderRace(t, "takes >1min under race")
62+
6063
baseDir := sharedTestdata(t)
6164
tc, sqlDB, cleanup := multiregionccltestutils.TestingCreateMultiRegionCluster(
6265
t, 3 /* numServers */, base.TestingKnobs{}, multiregionccltestutils.WithBaseDirectory(baseDir),
@@ -98,54 +101,6 @@ func TestImportMultiRegion(t *testing.T) {
98101
}))
99102
defer srv.Close()
100103

101-
viewsAndSequencesTestCases := []struct {
102-
desc string
103-
importSQL string
104-
expected map[string]string
105-
}{
106-
{
107-
desc: "pgdump",
108-
importSQL: `IMPORT PGDUMP 'nodelocal://1/pgdump/views_and_sequences.sql' WITH ignore_unsupported_statements`,
109-
expected: map[string]string{
110-
"tbl": "REGIONAL BY TABLE IN PRIMARY REGION",
111-
"s": "REGIONAL BY TABLE IN PRIMARY REGION",
112-
// views are ignored.
113-
},
114-
},
115-
{
116-
desc: "mysqldump",
117-
importSQL: `IMPORT MYSQLDUMP 'nodelocal://1/mysqldump/views_and_sequences.sql'`,
118-
expected: map[string]string{
119-
"tbl": "REGIONAL BY TABLE IN PRIMARY REGION",
120-
"tbl_auto_inc": "REGIONAL BY TABLE IN PRIMARY REGION",
121-
// views are ignored.
122-
},
123-
},
124-
}
125-
126-
for _, tc := range viewsAndSequencesTestCases {
127-
t.Run(tc.desc, func(t *testing.T) {
128-
tdb.Exec(t, `USE multi_region`)
129-
defer tdb.Exec(t, `
130-
DROP TABLE IF EXISTS tbl;
131-
DROP SEQUENCE IF EXISTS s;
132-
DROP SEQUENCE IF EXISTS table_auto_inc;
133-
DROP VIEW IF EXISTS v`,
134-
)
135-
136-
tdb.Exec(t, tc.importSQL)
137-
rows := tdb.Query(t, "SELECT table_name, locality FROM [SHOW TABLES] ORDER BY table_name")
138-
results := make(map[string]string)
139-
for rows.Next() {
140-
var tableName, locality string
141-
require.NoError(t, rows.Scan(&tableName, &locality))
142-
results[tableName] = locality
143-
}
144-
require.NoError(t, rows.Err())
145-
require.Equal(t, tc.expected, results)
146-
})
147-
}
148-
149104
t.Run("avro", func(t *testing.T) {
150105
tests := []struct {
151106
name string

pkg/sql/importer/BUILD.bazel

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -140,8 +140,6 @@ go_test(
140140
"read_import_avro_logical_test.go",
141141
"read_import_avro_test.go",
142142
"read_import_base_test.go",
143-
"read_import_mysql_test.go",
144-
"read_import_pgdump_test.go",
145143
"testutils_test.go",
146144
],
147145
data = glob(["testdata/**"]) + ["//c-deps:libgeos"],
@@ -165,7 +163,6 @@ go_test(
165163
"//pkg/jobs/jobspb",
166164
"//pkg/jobs/jobstest",
167165
"//pkg/keys",
168-
"//pkg/keyvisualizer",
169166
"//pkg/kv",
170167
"//pkg/kv/kvclient/kvcoord",
171168
"//pkg/kv/kvpb",
@@ -181,21 +178,17 @@ go_test(
181178
"//pkg/sql",
182179
"//pkg/sql/catalog",
183180
"//pkg/sql/catalog/bootstrap",
184-
"//pkg/sql/catalog/catformat",
185181
"//pkg/sql/catalog/catpb",
186-
"//pkg/sql/catalog/dbdesc",
187182
"//pkg/sql/catalog/descpb",
188183
"//pkg/sql/catalog/descs",
189184
"//pkg/sql/catalog/tabledesc",
190185
"//pkg/sql/distsql",
191186
"//pkg/sql/execinfra",
192187
"//pkg/sql/execinfrapb",
193188
"//pkg/sql/flowinfra",
194-
"//pkg/sql/gcjob",
195189
"//pkg/sql/isql",
196190
"//pkg/sql/parser",
197191
"//pkg/sql/pgwire/pgcode",
198-
"//pkg/sql/pgwire/pgerror",
199192
"//pkg/sql/randgen",
200193
"//pkg/sql/row",
201194
"//pkg/sql/rowenc",
@@ -236,13 +229,10 @@ go_test(
236229
"@com_github_cockroachdb_cockroach_go_v2//crdb",
237230
"@com_github_cockroachdb_errors//:errors",
238231
"@com_github_go_sql_driver_mysql//:mysql",
239-
"@com_github_jackc_pgconn//:pgconn",
240232
"@com_github_jackc_pgx_v4//:pgx",
241-
"@com_github_kr_pretty//:pretty",
242233
"@com_github_lib_pq//:pq",
243234
"@com_github_linkedin_goavro_v2//:goavro",
244235
"@com_github_stretchr_testify//assert",
245236
"@com_github_stretchr_testify//require",
246-
"@io_vitess_vitess//go/vt/sqlparser",
247237
],
248238
)

pkg/sql/importer/import_planning.go

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -139,14 +139,6 @@ var importOptionExpectValues = map[string]exprutil.KVStringOptValidate{
139139

140140
var pgDumpMaxLoggedStmts = 1024
141141

142-
func testingSetMaxLogIgnoredImportStatements(maxLogSize int) (cleanup func()) {
143-
prevLogSize := pgDumpMaxLoggedStmts
144-
pgDumpMaxLoggedStmts = maxLogSize
145-
return func() {
146-
pgDumpMaxLoggedStmts = prevLogSize
147-
}
148-
}
149-
150142
func makeStringSet(opts ...string) map[string]struct{} {
151143
res := make(map[string]struct{}, len(opts))
152144
for _, opt := range opts {

pkg/sql/importer/import_processor_test.go

Lines changed: 1 addition & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -97,8 +97,6 @@ func TestConverterFlushesBatches(t *testing.T) {
9797

9898
tests := []testSpec{
9999
newTestSpec(ctx, t, csvFormat(), "testdata/csv/data-0"),
100-
newTestSpec(ctx, t, mysqlDumpFormat(), "testdata/mysqldump/simple.sql"),
101-
newTestSpec(ctx, t, pgDumpFormat(), "testdata/pgdump/simple.sql"),
102100
newTestSpec(ctx, t, avroFormat(t, roachpb.AvroOptions_OCF), "testdata/avro/simple.ocf"),
103101
}
104102

@@ -272,16 +270,6 @@ func TestImportIgnoresProcessedFiles(t *testing.T) {
272270
newTestSpec(ctx, t, csvFormat(), "testdata/csv/data-0"),
273271
[]int64{0},
274272
},
275-
{
276-
"mysql-one-invalid",
277-
newTestSpec(ctx, t, mysqlDumpFormat(), "testdata/mysqldump/simple.sql", "/_/missing/_"),
278-
[]int64{0, eofOffset},
279-
},
280-
{
281-
"pgdump-one-input",
282-
newTestSpec(ctx, t, pgDumpFormat(), "testdata/pgdump/simple.sql"),
283-
[]int64{0},
284-
},
285273
{
286274
"avro-one-invalid",
287275
newTestSpec(ctx, t, avroFormat(t, roachpb.AvroOptions_OCF), "__invalid__", "testdata/avro/simple.ocf"),
@@ -387,10 +375,8 @@ func TestImportHonorsResumePosition(t *testing.T) {
387375
// contain sufficient number of rows.
388376
testSpecs := []testSpec{
389377
newTestSpec(ctx, t, csvFormat(), "testdata/csv/data-0"),
390-
newTestSpec(ctx, t, mysqlDumpFormat(), "testdata/mysqldump/simple.sql"),
391378
newTestSpec(ctx, t, mysqlOutFormat(), "testdata/mysqlout/csv-ish/simple.txt"),
392379
newTestSpec(ctx, t, pgCopyFormat(), "testdata/pgcopy/default/test.txt"),
393-
newTestSpec(ctx, t, pgDumpFormat(), "testdata/pgdump/simple.sql"),
394380
newTestSpec(ctx, t, avroFormat(t, roachpb.AvroOptions_JSON_RECORDS), "testdata/avro/simple-sorted.json"),
395381
}
396382

@@ -515,10 +501,8 @@ func TestImportHandlesDuplicateKVs(t *testing.T) {
515501
// All imports produce a DuplicateKeyError, which we expect to be propagated.
516502
testSpecs := []testSpec{
517503
newTestSpec(ctx, t, csvFormat(), "testdata/csv/data-0"),
518-
newTestSpec(ctx, t, mysqlDumpFormat(), "testdata/mysqldump/simple.sql"),
519504
newTestSpec(ctx, t, mysqlOutFormat(), "testdata/mysqlout/csv-ish/simple.txt"),
520505
newTestSpec(ctx, t, pgCopyFormat(), "testdata/pgcopy/default/test.txt"),
521-
newTestSpec(ctx, t, pgDumpFormat(), "testdata/pgdump/simple.sql"),
522506
newTestSpec(ctx, t, avroFormat(t, roachpb.AvroOptions_JSON_RECORDS), "testdata/avro/simple-sorted.json"),
523507
}
524508

@@ -942,9 +926,7 @@ func newTestSpec(
942926
descr = descForTable(ctx, t,
943927
"CREATE TABLE simple (i INT PRIMARY KEY, s text )", 100, 150, 200, NoFKs)
944928
case
945-
roachpb.IOFileFormat_Mysqldump,
946929
roachpb.IOFileFormat_MysqlOutfile,
947-
roachpb.IOFileFormat_PgDump,
948930
roachpb.IOFileFormat_PgCopy,
949931
roachpb.IOFileFormat_Avro:
950932
descr = descForTable(ctx, t,
@@ -963,12 +945,8 @@ func newTestSpec(
963945
}
964946
assert.True(t, numCols > 0)
965947

966-
fullTableName := "simple"
967-
if format.Format == roachpb.IOFileFormat_PgDump {
968-
fullTableName = "public.simple"
969-
}
970948
spec.tables = map[string]*execinfrapb.ReadImportDataSpec_ImportTable{
971-
fullTableName: {Desc: descr.TableDesc(), TargetCols: targetCols[0:numCols]},
949+
"simple": {Desc: descr.TableDesc(), TargetCols: targetCols[0:numCols]},
972950
}
973951

974952
for id, path := range inputs {
@@ -978,16 +956,6 @@ func newTestSpec(
978956
return spec
979957
}
980958

981-
func pgDumpFormat() roachpb.IOFileFormat {
982-
return roachpb.IOFileFormat{
983-
Format: roachpb.IOFileFormat_PgDump,
984-
PgDump: roachpb.PgDumpOptions{
985-
MaxRowSize: 64 * 1024,
986-
IgnoreUnsupported: true,
987-
},
988-
}
989-
}
990-
991959
func pgCopyFormat() roachpb.IOFileFormat {
992960
return roachpb.IOFileFormat{
993961
Format: roachpb.IOFileFormat_PgCopy,
@@ -999,12 +967,6 @@ func pgCopyFormat() roachpb.IOFileFormat {
999967
}
1000968
}
1001969

1002-
func mysqlDumpFormat() roachpb.IOFileFormat {
1003-
return roachpb.IOFileFormat{
1004-
Format: roachpb.IOFileFormat_Mysqldump,
1005-
}
1006-
}
1007-
1008970
func mysqlOutFormat() roachpb.IOFileFormat {
1009971
return roachpb.IOFileFormat{
1010972
Format: roachpb.IOFileFormat_MysqlOutfile,

0 commit comments

Comments
 (0)