@@ -13,7 +13,6 @@ import (
 
 	"github.com/planetscale/cli/internal/cmdutil"
 	"github.com/planetscale/cli/internal/printer"
-	querypb "github.com/xelabs/go-mysqlstack/sqlparser/depends/query"
 	"golang.org/x/sync/errgroup"
 
 	"go.uber.org/zap"
@@ -38,6 +37,7 @@ type Config struct {
 	Shard string
 	Table string
 	Outdir string
+	OutputFormat string
 	SessionVars []string
 	Threads int
 	ChunksizeInMB int
@@ -64,7 +64,8 @@ type Config struct {
 
 func NewDefaultConfig() *Config {
 	return &Config{
-		Threads: 1,
+		Threads:      1,
+		OutputFormat: "sql",
 	}
 }
 
@@ -80,6 +81,12 @@ func NewDumper(cfg *Config) (*Dumper, error) {
 	}, nil
 }
 
+type dumpContext struct {
+	fieldNames []string
+	selfields  []string
+	where      string
+}
+
 func (d *Dumper) Run(ctx context.Context) error {
 	initPool, err := NewPool(d.log, d.cfg.Threads, d.cfg.Address, d.cfg.User, d.cfg.Password, nil, "")
 	if err != nil {
@@ -262,51 +269,37 @@ func (d *Dumper) dumpTableSchema(conn *Connection, database string, table string
 	return nil
 }
 
-// Dump a table in "MySQL" (multi-inserts) format
+// Dump a table in the configured output format
 func (d *Dumper) dumpTable(ctx context.Context, conn *Connection, database string, table string) error {
-	var allBytes uint64
-	var allRows uint64
-	var where string
-	var selfields []string
-
-	fields := make([]string, 0)
-	{
-		flds, err := d.dumpableFieldNames(conn, table)
-		if err != nil {
-			return err
-		}
-
-		for _, name := range flds {
-			d.log.Debug("dump", zap.Any("filters", d.cfg.Filters), zap.String("table", table), zap.String("field_name", name))
-
-			if _, ok := d.cfg.Filters[table][name]; ok {
-				continue
-			}
+	var writer TableWriter
+
+	switch d.cfg.OutputFormat {
+	case "json":
+		writer = newJSONWriter(d.cfg)
+	case "csv":
+		writer = newCSVWriter(d.cfg)
+	default:
+		writer = newSQLWriter(d.cfg, table)
+	}
 
-			fields = append(fields, fmt.Sprintf("`%s`", name))
-			replacement, ok := d.cfg.Selects[table][name]
-			if ok {
-				selfields = append(selfields, fmt.Sprintf("%s AS `%s`", replacement, name))
-			} else {
-				selfields = append(selfields, fmt.Sprintf("`%s`", name))
-			}
-		}
+	dumpCtx, err := d.tableDumpContext(conn, table)
+	if err != nil {
+		return err
 	}
 
-	if v, ok := d.cfg.Wheres[table]; ok {
-		where = fmt.Sprintf(" WHERE %v", v)
+	if err := writer.Initialize(dumpCtx.fieldNames); err != nil {
+		return err
 	}
 
-	cursor, err := conn.StreamFetch(fmt.Sprintf("SELECT %s FROM `%s`.`%s` %s", strings.Join(selfields, ", "), database, table, where))
+	cursor, err := conn.StreamFetch(fmt.Sprintf("SELECT %s FROM `%s`.`%s` %s", strings.Join(dumpCtx.selfields, ", "), database, table, dumpCtx.where))
 	if err != nil {
 		return err
 	}
+	defer cursor.Close()
 
+	var allBytes uint64
+	var allRows uint64
 	fileNo := 1
-	stmtsize := 0
-	chunkbytes := 0
-	rows := make([]string, 0, 256)
-	inserts := make([]string, 0, 256)
 	for cursor.Next() {
 		row, err := cursor.RowValues()
 		if err != nil {
@@ -318,42 +311,18 @@ func (d *Dumper) dumpTable(ctx context.Context, conn *Connection, database strin
 			return ctx.Err()
 		}
 
-		values := make([]string, 0, 16)
-		for _, v := range row {
-			if v.Raw() == nil {
-				values = append(values, "NULL")
-			} else {
-				str := v.String()
-				switch {
-				case v.IsSigned(), v.IsUnsigned(), v.IsFloat(), v.IsIntegral(), v.Type() == querypb.Type_DECIMAL:
-					values = append(values, str)
-				default:
-					values = append(values, fmt.Sprintf("\"%s\"", escapeBytes(v.Raw())))
-				}
-			}
+		bytesAdded, err := writer.WriteRow(row)
+		if err != nil {
+			return err
 		}
-		r := "(" + strings.Join(values, ",") + ")"
-		rows = append(rows, r)
 
 		allRows++
-		stmtsize += len(r)
-		chunkbytes += len(r)
-		allBytes += uint64(len(r))
-		atomic.AddUint64(&d.cfg.Allbytes, uint64(len(r)))
+		allBytes += uint64(bytesAdded)
+		atomic.AddUint64(&d.cfg.Allbytes, uint64(bytesAdded))
 		atomic.AddUint64(&d.cfg.Allrows, 1)
 
-		if stmtsize >= d.cfg.StmtSize {
-			insertone := fmt.Sprintf("INSERT INTO `%s`(%s) VALUES\n%s", table, strings.Join(fields, ","), strings.Join(rows, ",\n"))
-			inserts = append(inserts, insertone)
-			rows = rows[:0]
-			stmtsize = 0
-		}
-
-		if (chunkbytes / 1024 / 1024) >= d.cfg.ChunksizeInMB {
-			query := strings.Join(inserts, ";\n") + ";\n"
-			file := fmt.Sprintf("%s/%s.%s.%05d.sql", d.cfg.Outdir, database, table, fileNo)
-			err = writeFile(file, query)
-			if err != nil {
+		if writer.ShouldFlush() {
+			if err := writer.Flush(d.cfg.Outdir, database, table, fileNo); err != nil {
 				return err
 			}
 
@@ -367,26 +336,11 @@ func (d *Dumper) dumpTable(ctx context.Context, conn *Connection, database strin
 				zap.Int("thread_conn_id", conn.ID),
 			)
 
-			inserts = inserts[:0]
-			chunkbytes = 0
 			fileNo++
 		}
 	}
-	if chunkbytes > 0 {
-		if len(rows) > 0 {
-			insertone := fmt.Sprintf("INSERT INTO `%s`(%s) VALUES\n%s", table, strings.Join(fields, ","), strings.Join(rows, ",\n"))
-			inserts = append(inserts, insertone)
-		}
 
-		query := strings.Join(inserts, ";\n") + ";\n"
-		file := fmt.Sprintf("%s/%s.%s.%05d.sql", d.cfg.Outdir, database, table, fileNo)
-		err = writeFile(file, query)
-		if err != nil {
-			return err
-		}
-	}
-	err = cursor.Close()
-	if err != nil {
+	if err := writer.Close(d.cfg.Outdir, database, table, fileNo); err != nil {
 		return err
 	}
 
@@ -401,6 +355,40 @@ func (d *Dumper) dumpTable(ctx context.Context, conn *Connection, database strin
 	return nil
 }
 
+func (d *Dumper) tableDumpContext(conn *Connection, table string) (*dumpContext, error) {
+	ctx := &dumpContext{}
+
+	flds, err := d.dumpableFieldNames(conn, table)
+	if err != nil {
+		return nil, err
+	}
+
+	ctx.fieldNames = make([]string, 0)
+	ctx.selfields = make([]string, 0)
+
+	for _, name := range flds {
+		d.log.Debug("dump", zap.Any("filters", d.cfg.Filters), zap.String("table", table), zap.String("field_name", name))
+
+		if _, ok := d.cfg.Filters[table][name]; ok {
+			continue
+		}
+
+		ctx.fieldNames = append(ctx.fieldNames, name)
+		replacement, ok := d.cfg.Selects[table][name]
+		if ok {
+			ctx.selfields = append(ctx.selfields, fmt.Sprintf("%s AS `%s`", replacement, name))
+		} else {
+			ctx.selfields = append(ctx.selfields, fmt.Sprintf("`%s`", name))
+		}
+	}
+
+	if v, ok := d.cfg.Wheres[table]; ok {
+		ctx.where = fmt.Sprintf(" WHERE %v", v)
+	}
+
+	return ctx, nil
+}
+
 func (d *Dumper) allTables(conn *Connection, database string) ([]string, error) {
 	qr, err := conn.Fetch(fmt.Sprintf("SHOW TABLES FROM `%s`", database))
 	if err != nil {
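Note: the TableWriter type and its constructors (newSQLWriter, newCSVWriter, newJSONWriter) are defined outside the hunks shown here. As a rough sketch only, the interface implied by the call sites in dumpTable could look like the following; the method set is taken from the calls above, while the package name and the sqltypes.Value row type are assumptions based on the removed go-mysqlstack import, not confirmed by this diff.

// Sketch only: inferred from the dumpTable call sites in the diff above; the
// real definition lives outside the hunks shown here.
package dumper

// Assumption: rows arrive as the go-mysqlstack value type that the old inline
// code inspected via Raw()/String()/IsSigned().
import "github.com/xelabs/go-mysqlstack/sqlparser/depends/sqltypes"

// TableWriter buffers rows for a single table and writes them out in one
// output format (sql, csv, or json).
type TableWriter interface {
	// Initialize receives the dumpable column names before the first row.
	Initialize(fieldNames []string) error

	// WriteRow buffers one result row and returns the number of bytes it
	// added, so the caller can maintain the Allbytes/Allrows counters.
	WriteRow(row []sqltypes.Value) (int, error)

	// ShouldFlush reports whether the buffered chunk is large enough
	// (e.g. relative to ChunksizeInMB) to be written to disk.
	ShouldFlush() bool

	// Flush writes the buffered chunk to a numbered file under outdir and
	// resets the buffer.
	Flush(outdir, database, table string, fileNo int) error

	// Close flushes any remaining buffered rows for the final file number.
	Close(outdir, database, table string, fileNo int) error
}

Moving per-format chunking and escaping behind such an interface is what lets the streaming loop in dumpTable stay format-agnostic: the loop only counts rows and bytes and asks the writer when a chunk should be flushed.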