Commit 1558766 ("refactor")
1 parent 63825d4

File tree: 3 files changed, +112 / -180 lines


cmd/dump.go
Lines changed: 1 addition & 1 deletion

@@ -145,7 +145,7 @@ func runDump(ctx context.Context, cmd *cli.Command) error {
 		return err
 	}
 
-	dumper := dump.NewDumper(outType, outFile)
+	dumper := dump.NewDumper(ctx, outType, outFile)
 	dumper.Verbose = verbose
 	dumper.GlobalExcludeAbsPath(outFileName)
 	defer dumper.Close()
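
Note that the refactored NewDumper (see modules/dump/dumper.go below) now returns (*Dumper, error), while the + line above keeps a single-value assignment; presumably the real call site also handles the error. A minimal sketch of a call site compatible with the new signature (hypothetical, not part of this hunk):

// Sketch of the runDump call site: NewDumper now validates the format
// eagerly, so its error must be handled before the dumper is used.
dumper, err := dump.NewDumper(ctx, outType, outFile)
if err != nil {
	return err
}
dumper.Verbose = verbose
dumper.GlobalExcludeAbsPath(outFileName)
defer dumper.Close()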

modules/dump/dumper.go
Lines changed: 64 additions & 128 deletions

@@ -5,6 +5,7 @@ package dump
 
 import (
 	"context"
+	"errors"
 	"fmt"
 	"io"
 	"io/fs"
@@ -62,78 +63,48 @@ func IsSubdir(upper, lower string) (bool, error) {
 }
 
 type Dumper struct {
-	format string
-	output io.Writer
-	jobs   chan archives.ArchiveAsyncJob
-	done   chan error
 	Verbose bool
 
+	jobs            chan archives.ArchiveAsyncJob
+	errArchiveAsync chan error
+	errArchiveJob   chan error
+
 	globalExcludeAbsPaths []string
 }
 
-func NewDumper(format string, output io.Writer) *Dumper {
+func NewDumper(ctx context.Context, format string, output io.Writer) (*Dumper, error) {
 	d := &Dumper{
-		format: format,
-		output: output,
-		jobs:   make(chan archives.ArchiveAsyncJob, 100),
-		done:   make(chan error, 1),
+		jobs:            make(chan archives.ArchiveAsyncJob, 1),
+		errArchiveAsync: make(chan error, 1),
+		errArchiveJob:   make(chan error, 1),
	}
-	d.startArchiver()
-	return d
-}
 
-func (dumper *Dumper) startArchiver() {
+	var comp archives.ArchiverAsync
+	switch format {
+	case "zip":
+		comp = archives.Zip{}
+	case "tar":
+		comp = archives.Tar{}
+	case "tar.gz":
+		comp = archives.CompressedArchive{Compression: archives.Gz{}, Archival: archives.Tar{}}
+	case "tar.xz":
+		comp = archives.CompressedArchive{Compression: archives.Xz{}, Archival: archives.Tar{}}
+	case "tar.bz2":
+		comp = archives.CompressedArchive{Compression: archives.Bz2{}, Archival: archives.Tar{}}
+	case "tar.br":
+		comp = archives.CompressedArchive{Compression: archives.Brotli{}, Archival: archives.Tar{}}
+	case "tar.lz4":
+		comp = archives.CompressedArchive{Compression: archives.Lz4{}, Archival: archives.Tar{}}
+	case "tar.zst":
+		comp = archives.CompressedArchive{Compression: archives.Zstd{}, Archival: archives.Tar{}}
+	default:
+		return nil, fmt.Errorf("unsupported format: %s", format)
+	}
 	go func() {
-		ctx := context.Background()
-		var err error
-
-		switch dumper.format {
-		case "zip":
-			err = archives.Zip{}.ArchiveAsync(ctx, dumper.output, dumper.jobs)
-		case "tar":
-			err = archives.Tar{}.ArchiveAsync(ctx, dumper.output, dumper.jobs)
-		case "tar.gz":
-			comp := archives.CompressedArchive{
-				Compression: archives.Gz{},
-				Archival:    archives.Tar{},
-			}
-			err = comp.ArchiveAsync(ctx, dumper.output, dumper.jobs)
-		case "tar.xz":
-			comp := archives.CompressedArchive{
-				Compression: archives.Xz{},
-				Archival:    archives.Tar{},
-			}
-			err = comp.ArchiveAsync(ctx, dumper.output, dumper.jobs)
-		case "tar.bz2":
-			comp := archives.CompressedArchive{
-				Compression: archives.Bz2{},
-				Archival:    archives.Tar{},
-			}
-			err = comp.ArchiveAsync(ctx, dumper.output, dumper.jobs)
-		case "tar.br":
-			comp := archives.CompressedArchive{
-				Compression: archives.Brotli{},
-				Archival:    archives.Tar{},
-			}
-			err = comp.ArchiveAsync(ctx, dumper.output, dumper.jobs)
-		case "tar.lz4":
-			comp := archives.CompressedArchive{
-				Compression: archives.Lz4{},
-				Archival:    archives.Tar{},
-			}
-			err = comp.ArchiveAsync(ctx, dumper.output, dumper.jobs)
-		case "tar.zst":
-			comp := archives.CompressedArchive{
-				Compression: archives.Zstd{},
-				Archival:    archives.Tar{},
-			}
-			err = comp.ArchiveAsync(ctx, dumper.output, dumper.jobs)
-		default:
-			err = fmt.Errorf("unsupported format: %s", dumper.format)
-		}
-
-		dumper.done <- err
+		d.errArchiveAsync <- comp.ArchiveAsync(ctx, output, d.jobs)
+		close(d.errArchiveAsync)
 	}()
+	return d, nil
 }
 
 // AddFilePath adds a file by its filesystem path
@@ -147,97 +118,62 @@ func (dumper *Dumper) AddFilePath(filePath, absPath string) error {
 		return err
 	}
 
-	var archiveFileInfo archives.FileInfo
-	if fileInfo.IsDir() {
-		archiveFileInfo = archives.FileInfo{
-			FileInfo:      fileInfo,
-			NameInArchive: filePath,
-			Open: func() (fs.File, error) {
-				return &emptyDirFile{info: fileInfo}, nil
-			},
-		}
-	} else {
-		archiveFileInfo = archives.FileInfo{
-			FileInfo:      fileInfo,
-			NameInArchive: filePath,
-			Open: func() (fs.File, error) {
-				return os.Open(absPath)
-			},
-		}
+	archiveFileInfo := archives.FileInfo{
+		FileInfo:      fileInfo,
+		NameInArchive: filePath,
+		Open: func() (fs.File, error) {
+			return os.Open(absPath)
+		},
 	}
 
-	resultChan := make(chan error, 1)
-	job := archives.ArchiveAsyncJob{
+	dumper.jobs <- archives.ArchiveAsyncJob{
 		File:   archiveFileInfo,
-		Result: resultChan,
+		Result: dumper.errArchiveJob,
 	}
-
 	select {
-	case dumper.jobs <- job:
-		return <-resultChan
-	case err := <-dumper.done:
+	case err = <-dumper.errArchiveAsync:
+		if err == nil {
+			return errors.New("archiver has been closed")
+		}
+		return err
+	case err = <-dumper.errArchiveJob:
 		return err
 	}
 }
 
+type readerFile struct {
+	r    io.Reader
+	info os.FileInfo
+}
+
+var _ fs.File = (*readerFile)(nil)
+
+func (f *readerFile) Stat() (fs.FileInfo, error)     { return f.info, nil }
+func (f *readerFile) Read(bytes []byte) (int, error) { return f.r.Read(bytes) }
+func (f *readerFile) Close() error                   { return nil }
+
 // AddReader adds a file's contents from a Reader, this uses a pipe to stream files from object store to prevent them from filling up disk
-func (dumper *Dumper) AddReader(r io.ReadCloser, info os.FileInfo, customName string) error {
+func (dumper *Dumper) AddReader(r io.Reader, info os.FileInfo, customName string) error {
 	if dumper.Verbose {
 		log.Info("Adding file %s", customName)
 	}
 
-	pr, pw := io.Pipe()
-
 	fileInfo := archives.FileInfo{
 		FileInfo:      info,
 		NameInArchive: customName,
-		Open: func() (fs.File, error) {
-			go func() {
-				defer pw.Close()
-				_, err := io.Copy(pw, r)
-				r.Close()
-				if err != nil {
-					pw.CloseWithError(err)
-				}
-			}()
-
-			return &pipeFile{PipeReader: pr, info: info}, nil
-		},
+		Open: func() (fs.File, error) { return &readerFile{r, info}, nil },
 	}
 
-	resultChan := make(chan error, 1)
-	job := archives.ArchiveAsyncJob{
+	dumper.jobs <- archives.ArchiveAsyncJob{
 		File:   fileInfo,
-		Result: resultChan,
+		Result: dumper.errArchiveJob,
 	}
-
-	select {
-	case dumper.jobs <- job:
-		return <-resultChan
-	case err := <-dumper.done:
-		return err
-	}
-}
-
-// pipeFile makes io.PipeReader compatible with fs.File interface
-type pipeFile struct {
-	*io.PipeReader
-	info os.FileInfo
+	return <-dumper.errArchiveJob
 }
 
-func (f *pipeFile) Stat() (fs.FileInfo, error) { return f.info, nil }
-
-type emptyDirFile struct {
-	info os.FileInfo
-}
-
-func (f *emptyDirFile) Read([]byte) (int, error)   { return 0, io.EOF }
-func (f *emptyDirFile) Close() error               { return nil }
-func (f *emptyDirFile) Stat() (fs.FileInfo, error) { return f.info, nil }
-
 func (dumper *Dumper) Close() error {
 	close(dumper.jobs)
-	return <-dumper.done
+	return <-dumper.errArchiveAsync
 }
 
 // AddFile kept for backwards compatibility since streaming is more efficient
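
Taken together, the dumper.go changes move format selection out of the archiver goroutine and into the constructor, replace the per-call result channels with two long-lived error channels (errArchiveJob for per-file results, errArchiveAsync for the archiver's final error), and swap the io.Pipe copy in AddReader for a thin fs.File adapter over the caller's reader. A minimal usage sketch of the new API (the output path, file paths, and error-handling style here are illustrative, not from this commit):

package main

import (
	"context"
	"log"
	"os"

	"code.gitea.io/gitea/modules/dump"
)

func main() {
	out, err := os.Create("gitea-dump.tar.gz") // illustrative output path
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	// The constructor validates the format up front and starts the single
	// ArchiveAsync goroutine itself; unknown formats fail immediately.
	dumper, err := dump.NewDumper(context.Background(), "tar.gz", out)
	if err != nil {
		log.Fatal(err) // e.g. "unsupported format: rar"
	}

	// Queue a file; its result arrives on the shared errArchiveJob channel.
	// Both paths below are illustrative.
	if err := dumper.AddFilePath("conf/app.ini", "/etc/gitea/app.ini"); err != nil {
		log.Fatal(err)
	}

	// Close closes the jobs channel so ArchiveAsync can finish, then returns
	// the archiver's final error from errArchiveAsync.
	if err := dumper.Close(); err != nil {
		log.Fatal(err)
	}
}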

modules/dump/dumper_test.go
Lines changed: 47 additions & 51 deletions

@@ -11,11 +11,11 @@ import (
 	"os"
 	"path/filepath"
 	"sort"
-	"strings"
 	"testing"
 	"time"
 
 	"code.gitea.io/gitea/modules/timeutil"
+	"github.com/stretchr/testify/require"
 
 	"github.com/stretchr/testify/assert"
 )
@@ -71,74 +71,70 @@ func TestIsSubDir(t *testing.T) {
 
 func TestDumperIntegration(t *testing.T) {
 	var buf bytes.Buffer
-	dumper := NewDumper("zip", &buf)
+	dumper, err := NewDumper(t.Context(), "zip", &buf)
+	require.NoError(t, err)
 
-	testContent := "test content"
-	testReader := io.NopCloser(strings.NewReader(testContent))
-	testInfo := &testFileInfo{name: "test.txt", size: int64(len(testContent))}
+	tmpDir := t.TempDir()
+	_ = os.WriteFile(filepath.Join(tmpDir, "test.txt"), nil, 0o644)
+	f, _ := os.Open(filepath.Join(tmpDir, "test.txt"))
 
-	err := dumper.AddReader(testReader, testInfo, "test.txt")
-	assert.NoError(t, err)
+	fi, _ := f.Stat()
+	err = dumper.AddReader(f, fi, "test.txt")
+	require.NoError(t, err)
 
 	err = dumper.Close()
-	assert.NoError(t, err)
+	require.NoError(t, err)
 
-	assert.Positive(t, buf.Len(), "Archive should contain data")
+	assert.Positive(t, buf.Len())
 }
 
-type testFileInfo struct {
-	name string
-	size int64
-}
-
-func (t *testFileInfo) Name() string       { return t.name }
-func (t *testFileInfo) Size() int64        { return t.size }
-func (t *testFileInfo) Mode() os.FileMode  { return 0o644 }
-func (t *testFileInfo) ModTime() time.Time { return time.Now() }
-func (t *testFileInfo) IsDir() bool        { return false }
-func (t *testFileInfo) Sys() any           { return nil }
-
 func TestDumper(t *testing.T) {
 	tmpDir := t.TempDir()
 	_ = os.MkdirAll(filepath.Join(tmpDir, "include/exclude1"), 0o755)
 	_ = os.MkdirAll(filepath.Join(tmpDir, "include/exclude2"), 0o755)
 	_ = os.MkdirAll(filepath.Join(tmpDir, "include/sub"), 0o755)
-	_ = os.WriteFile(filepath.Join(tmpDir, "include/a"), []byte("content-a"), 0o644)
-	_ = os.WriteFile(filepath.Join(tmpDir, "include/sub/b"), []byte("content-b"), 0o644)
-	_ = os.WriteFile(filepath.Join(tmpDir, "include/exclude1/a-1"), []byte("content-a-1"), 0o644)
-	_ = os.WriteFile(filepath.Join(tmpDir, "include/exclude2/a-2"), []byte("content-a-2"), 0o644)
-
-	var buf1 bytes.Buffer
-	dumper1 := NewDumper("tar", &buf1)
-	dumper1.GlobalExcludeAbsPath(filepath.Join(tmpDir, "include/exclude1"))
-	err := dumper1.AddRecursiveExclude("include", filepath.Join(tmpDir, "include"), []string{filepath.Join(tmpDir, "include/exclude2")})
-	assert.NoError(t, err)
-	err = dumper1.Close()
-	assert.NoError(t, err)
+	_ = os.WriteFile(filepath.Join(tmpDir, "include/a"), nil, 0o644)
+	_ = os.WriteFile(filepath.Join(tmpDir, "include/sub/b"), nil, 0o644)
+	_ = os.WriteFile(filepath.Join(tmpDir, "include/exclude1/a-1"), nil, 0o644)
+	_ = os.WriteFile(filepath.Join(tmpDir, "include/exclude2/a-2"), nil, 0o644)
 
-	files1 := extractTarFileNames(t, &buf1)
 	sortStrings := func(s []string) []string {
 		sort.Strings(s)
 		return s
 	}
 
-	expected1 := []string{"include/a", "include/sub", "include/sub/b"}
-	assert.Equal(t, sortStrings(expected1), sortStrings(files1))
-
-	var buf2 bytes.Buffer
-	dumper2 := NewDumper("tar", &buf2)
-	err = dumper2.AddRecursiveExclude("include", filepath.Join(tmpDir, "include"), nil)
-	assert.NoError(t, err)
-	err = dumper2.Close()
-	assert.NoError(t, err)
-
-	files2 := extractTarFileNames(t, &buf2)
-	expected2 := []string{
-		"include/exclude2", "include/exclude2/a-2",
-		"include/a", "include/sub", "include/sub/b",
-		"include/exclude1", "include/exclude1/a-1",
-	}
-	assert.Equal(t, sortStrings(expected2), sortStrings(files2))
+	t.Run("IncludesWithExcludes", func(t *testing.T) {
+		var buf bytes.Buffer
+		dumper, err := NewDumper(t.Context(), "tar", &buf)
+		require.NoError(t, err)
+		dumper.GlobalExcludeAbsPath(filepath.Join(tmpDir, "include/exclude1"))
+		err = dumper.AddRecursiveExclude("include", filepath.Join(tmpDir, "include"), []string{filepath.Join(tmpDir, "include/exclude2")})
+		require.NoError(t, err)
+		err = dumper.Close()
+		require.NoError(t, err)
+
+		files := extractTarFileNames(t, &buf)
+		expected := []string{"include/a", "include/sub", "include/sub/b"}
+		assert.Equal(t, sortStrings(expected), sortStrings(files))
+	})
+
+	t.Run("IncludesAll", func(t *testing.T) {
+		var buf bytes.Buffer
+		dumper, err := NewDumper(t.Context(), "tar", &buf)
+		require.NoError(t, err)
+		err = dumper.AddRecursiveExclude("include", filepath.Join(tmpDir, "include"), nil)
+		require.NoError(t, err)
+		err = dumper.Close()
+		require.NoError(t, err)
+
+		files := extractTarFileNames(t, &buf)
+		expected := []string{
+			"include/exclude2", "include/exclude2/a-2",
+			"include/a", "include/sub", "include/sub/b",
+			"include/exclude1", "include/exclude1/a-1",
+		}
+		assert.Equal(t, sortStrings(expected), sortStrings(files))
	})
 }
 
 func extractTarFileNames(t *testing.T, buf *bytes.Buffer) []string {
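
extractTarFileNames appears above only as trailing context; its body is outside this diff. For orientation, a plausible reconstruction (an assumption, not the commit's code) would walk the archive with the standard archive/tar reader, assuming archive/tar and io are among the file's imports:

// Hypothetical reconstruction: collect the entry names of the tar
// archive accumulated in buf.
func extractTarFileNames(t *testing.T, buf *bytes.Buffer) []string {
	var names []string
	tr := tar.NewReader(buf)
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			break // end of archive
		}
		require.NoError(t, err)
		names = append(names, hdr.Name)
	}
	return names
}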
