Skip to content

Commit b29910d

Browse files
committed.
1 parent 3d5e4e5 commit b29910d

File tree

5 files changed

+47
-46
lines changed

5 files changed

+47
-46
lines changed

drivers/139/driver.go

Lines changed: 21 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -533,18 +533,19 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
533533
}
534534
}
535535

536-
partInfos := []PartInfo{}
537-
var partSize = d.getPartSize(stream.GetSize())
538-
part := (stream.GetSize() + partSize - 1) / partSize
539-
if part == 0 {
540-
part = 1
541-
}
542-
for i := int64(0); i < part; i++ {
536+
size := stream.GetSize()
537+
var partSize = d.getPartSize(size)
538+
part := size / partSize
539+
if size%partSize > 0 {
540+
part++
541+
}
542+
partInfos := make([]PartInfo, 0, part)
543+
for i := range part {
543544
if utils.IsCanceled(ctx) {
544545
return ctx.Err()
545546
}
546547
start := i * partSize
547-
byteSize := stream.GetSize() - start
548+
byteSize := size - start
548549
if byteSize > partSize {
549550
byteSize = partSize
550551
}
@@ -572,7 +573,7 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
572573
"contentType": "application/octet-stream",
573574
"parallelUpload": false,
574575
"partInfos": firstPartInfos,
575-
"size": stream.GetSize(),
576+
"size": size,
576577
"parentFileId": dstDir.GetID(),
577578
"name": stream.GetName(),
578579
"type": "file",
@@ -625,7 +626,7 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
625626
}
626627

627628
// Progress
628-
p := driver.NewProgress(stream.GetSize(), up)
629+
p := driver.NewProgress(size, up)
629630

630631
rateLimited := driver.NewLimitedUploadStream(ctx, stream)
631632
// 上传所有分片
@@ -776,22 +777,22 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
776777
return err
777778
}
778779

779-
// Progress
780-
p := driver.NewProgress(stream.GetSize(), up)
781-
782-
var partSize = d.getPartSize(stream.GetSize())
783-
part := (stream.GetSize() + partSize - 1) / partSize
784-
if part == 0 {
785-
part = 1
780+
size := stream.GetSize()
781+
var partSize = d.getPartSize(size)
782+
part := size / partSize
783+
if size%partSize > 0 {
784+
part++
786785
}
786+
// Progress
787+
p := driver.NewProgress(size, up)
787788
rateLimited := driver.NewLimitedUploadStream(ctx, stream)
788-
for i := int64(0); i < part; i++ {
789+
for i := range part {
789790
if utils.IsCanceled(ctx) {
790791
return ctx.Err()
791792
}
792793

793794
start := i * partSize
794-
byteSize := stream.GetSize() - start
795+
byteSize := size - start
795796
if byteSize > partSize {
796797
byteSize = partSize
797798
}
@@ -806,7 +807,7 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
806807

807808
req = req.WithContext(ctx)
808809
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
809-
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
810+
req.Header.Set("contentSize", strconv.FormatInt(size, 10))
810811
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
811812
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
812813
req.Header.Set("rangeType", "0")

drivers/189pc/utils.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -525,7 +525,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
525525
break
526526
}
527527
byteData := make([]byte, sliceSize)
528-
if i == count {
528+
if i == count && lastSliceSize > 0 {
529529
byteData = byteData[:lastSliceSize]
530530
}
531531

@@ -647,7 +647,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
647647
return nil, ctx.Err()
648648
}
649649

650-
if i == count {
650+
if i == count && lastSliceSize > 0 {
651651
byteSize = lastSliceSize
652652
}
653653

drivers/quark_uc/driver.go

Lines changed: 12 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -178,27 +178,28 @@ func (d *QuarkOrUC) Put(ctx context.Context, dstDir model.Obj, stream model.File
178178
return nil
179179
}
180180
// part up
181-
partSize := pre.Metadata.PartSize
182-
var part []byte
183-
md5s := make([]string, 0)
184-
defaultBytes := make([]byte, partSize)
185181
total := stream.GetSize()
186-
left := total
182+
partSize := int64(pre.Metadata.PartSize)
183+
part := make([]byte, pre.Metadata.PartSize)
184+
count := int(total / partSize)
185+
if total%partSize > 0 {
186+
count++
187+
}
188+
md5s := make([]string, 0, count)
187189
partNumber := 1
190+
left := total
188191
for left > 0 {
189192
if utils.IsCanceled(ctx) {
190193
return ctx.Err()
191194
}
192-
if left > int64(partSize) {
193-
part = defaultBytes
194-
} else {
195-
part = make([]byte, left)
195+
if left < partSize {
196+
part = part[:left]
196197
}
197-
_, err := io.ReadFull(stream, part)
198+
n, err := io.ReadFull(stream, part)
198199
if err != nil {
199200
return err
200201
}
201-
left -= int64(len(part))
202+
left -= int64(n)
202203
log.Debugf("left: %d", left)
203204
reader := driver.NewLimitedUploadStream(ctx, bytes.NewReader(part))
204205
m, err := d.upPart(ctx, pre, stream.GetMimetype(), partNumber, reader)

drivers/terabox/driver.go

Lines changed: 10 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@ import (
66
"encoding/hex"
77
"fmt"
88
"io"
9-
"math"
109
stdpath "path"
1110
"strconv"
1211

@@ -189,25 +188,24 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
189188
streamSize := stream.GetSize()
190189
chunkSize := calculateChunkSize(streamSize)
191190
chunkByteData := make([]byte, chunkSize)
192-
count := int(math.Ceil(float64(streamSize) / float64(chunkSize)))
193-
left := streamSize
191+
count := int(streamSize / chunkSize)
192+
if streamSize%chunkSize > 0 {
193+
count++
194+
}
194195
uploadBlockList := make([]string, 0, count)
196+
left := streamSize
195197
md5 := utils.MD5.NewFunc()
196198
reader := io.TeeReader(stream, md5)
197-
for partseq := 0; partseq < count; partseq++ {
199+
for partseq := range count {
198200
if utils.IsCanceled(ctx) {
199201
return ctx.Err()
200202
}
201203
byteSize := chunkSize
202-
var byteData []byte
203-
if left >= chunkSize {
204-
byteData = chunkByteData
205-
} else {
206-
byteSize = left
207-
byteData = make([]byte, byteSize)
204+
if left < chunkSize {
205+
chunkByteData = chunkByteData[:left]
208206
}
209207
left -= byteSize
210-
_, err = io.ReadFull(reader, byteData)
208+
_, err = io.ReadFull(reader, chunkByteData)
211209
if err != nil {
212210
return err
213211
}
@@ -221,7 +219,7 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
221219
res, err := base.RestyClient.R().
222220
SetContext(ctx).
223221
SetQueryParams(params).
224-
SetFileReader("file", stream.GetName(), driver.NewLimitedUploadStream(ctx, bytes.NewReader(byteData))).
222+
SetFileReader("file", stream.GetName(), driver.NewLimitedUploadStream(ctx, bytes.NewReader(chunkByteData))).
225223
SetHeader("Cookie", d.Cookie).
226224
Post(u)
227225
if err != nil {

internal/net/request.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -248,8 +248,9 @@ func (d *downloader) sendChunkTask(newConcurrency bool) error {
248248
size: finalSize,
249249
id: d.nextChunk,
250250
buf: buf,
251+
252+
newConcurrency: newConcurrency,
251253
}
252-
ch.newConcurrency = newConcurrency
253254
d.pos += finalSize
254255
d.nextChunk++
255256
d.chunkChannel <- ch

0 commit comments

Comments (0)