Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 39 additions & 0 deletions backend/drive/drive_internal_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -643,6 +643,44 @@ func (f *Fs) InternalTestAgeQuery(t *testing.T) {
assert.Contains(t, subFs.lastQuery, timeQuery)
}

// TestIntegration/FsMkdir/FsPutFiles/Internal/SingleQuoteFolder
// InternalTestSingleQuoteFolder checks that folder names containing
// single quotes survive the drive query escaping round trip: each name
// is created, looked up in the parent listing, and removed again.
//
// TestIntegration/FsMkdir/FsPutFiles/Internal/SingleQuoteFolder
func (f *Fs) InternalTestSingleQuoteFolder(t *testing.T) {
	ctx := context.Background()

	// Each of these names exercises a different single-quote pattern.
	quoteNames := []string{
		"'",
		"''",
		"'a'",
		"it's a test",
	}
	for _, quoteName := range quoteNames {
		t.Run(quoteName, func(t *testing.T) {
			dirPath := "singleQuoteTest/" + quoteName
			require.NoError(t, f.Mkdir(ctx, dirPath))
			defer func() {
				assert.NoError(t, f.Rmdir(ctx, dirPath))
			}()

			listing, err := f.List(ctx, "singleQuoteTest")
			require.NoError(t, err)

			// Scan the parent listing for the directory we just created.
			seen := false
			for _, item := range listing {
				if item.Remote() == dirPath {
					seen = true
					break
				}
			}
			assert.True(t, seen, "directory %q not found in listing", quoteName)
		})
	}

	// Remove the now-empty parent test directory.
	assert.NoError(t, f.Rmdir(ctx, "singleQuoteTest"))
}

func (f *Fs) InternalTest(t *testing.T) {
// These tests all depend on each other so run them as nested tests
t.Run("DocumentImport", func(t *testing.T) {
Expand All @@ -662,6 +700,7 @@ func (f *Fs) InternalTest(t *testing.T) {
t.Run("CopyOrMoveID", f.InternalTestCopyOrMoveID)
t.Run("Query", f.InternalTestQuery)
t.Run("AgeQuery", f.InternalTestAgeQuery)
t.Run("SingleQuoteFolder", f.InternalTestSingleQuoteFolder)
t.Run("ShouldRetry", f.InternalTestShouldRetry)
}

Expand Down
33 changes: 16 additions & 17 deletions backend/internxt/internxt.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ import (
"io"
"net"
"path"
"path/filepath"
"strings"
"sync"
"time"
Expand Down Expand Up @@ -508,8 +507,8 @@ func (f *Fs) CreateDir(ctx context.Context, pathID, leaf string) (string, error)
func (f *Fs) preUploadCheck(ctx context.Context, leaf, directoryID string) (*folders.File, error) {
// Parse name and extension from the leaf
baseName := f.opt.Encoding.FromStandardName(leaf)
name := strings.TrimSuffix(baseName, filepath.Ext(baseName))
ext := strings.TrimPrefix(filepath.Ext(baseName), ".")
name := strings.TrimSuffix(baseName, path.Ext(baseName))
ext := strings.TrimPrefix(path.Ext(baseName), ".")

checkResult, err := files.CheckFilesExistence(ctx, f.cfg, directoryID, []files.FileExistenceCheck{
{
Expand Down Expand Up @@ -580,7 +579,7 @@ func (f *Fs) List(ctx context.Context, dir string) (fs.DirEntries, error) {
return nil, err
}
for _, e := range foldersList {
remote := filepath.Join(dir, f.opt.Encoding.ToStandardName(e.PlainName))
remote := path.Join(dir, f.opt.Encoding.ToStandardName(e.PlainName))
out = append(out, fs.NewDir(remote, e.ModificationTime))
}
var filesList []folders.File
Expand All @@ -597,7 +596,7 @@ func (f *Fs) List(ctx context.Context, dir string) (fs.DirEntries, error) {
if len(e.Type) > 0 {
remote += "." + e.Type
}
remote = filepath.Join(dir, f.opt.Encoding.ToStandardName(remote))
remote = path.Join(dir, f.opt.Encoding.ToStandardName(remote))
out = append(out, newObjectWithFile(f, remote, &e))
}
return out, nil
Expand Down Expand Up @@ -676,7 +675,7 @@ func (f *Fs) Remove(ctx context.Context, remote string) error {

// NewObject creates a new object
func (f *Fs) NewObject(ctx context.Context, remote string) (fs.Object, error) {
parentDir := filepath.Dir(remote)
parentDir := path.Dir(remote)

if parentDir == "." {
parentDir = ""
Expand All @@ -696,7 +695,7 @@ func (f *Fs) NewObject(ctx context.Context, remote string) (fs.Object, error) {
if err != nil {
return nil, err
}
targetName := filepath.Base(remote)
targetName := path.Base(remote)
for _, e := range files {
name := e.PlainName
if len(e.Type) > 0 {
Expand Down Expand Up @@ -837,9 +836,9 @@ func (o *Object) Open(ctx context.Context, options ...fs.OpenOption) (io.ReadClo
func (o *Object) Update(ctx context.Context, in io.Reader, src fs.ObjectInfo, options ...fs.OpenOption) error {
remote := o.remote

origBaseName := filepath.Base(remote)
origName := strings.TrimSuffix(origBaseName, filepath.Ext(origBaseName))
origType := strings.TrimPrefix(filepath.Ext(origBaseName), ".")
origBaseName := path.Base(remote)
origName := strings.TrimSuffix(origBaseName, path.Ext(origBaseName))
origType := strings.TrimPrefix(path.Ext(origBaseName), ".")

// Create directory if it doesn't exist
_, dirID, err := o.f.dirCache.FindPath(ctx, remote, true)
Expand All @@ -857,9 +856,9 @@ func (o *Object) Update(ctx context.Context, in io.Reader, src fs.ObjectInfo, op
// Step 1: If file exists, rename to backup (preserves old file during upload)
if oldUUID != "" {
// Generate unique backup name
baseName := filepath.Base(remote)
name := strings.TrimSuffix(baseName, filepath.Ext(baseName))
ext := strings.TrimPrefix(filepath.Ext(baseName), ".")
baseName := path.Base(remote)
name := strings.TrimSuffix(baseName, path.Ext(baseName))
ext := strings.TrimPrefix(path.Ext(baseName), ".")

backupSuffix := fmt.Sprintf(".rclone-backup-%s", random.String(8))
backupName = o.f.opt.Encoding.FromStandardName(name + backupSuffix)
Expand Down Expand Up @@ -891,7 +890,7 @@ func (o *Object) Update(ctx context.Context, in io.Reader, src fs.ObjectInfo, op
meta, err = buckets.UploadFileStreamAuto(ctx,
o.f.cfg,
dirID,
o.f.opt.Encoding.FromStandardName(filepath.Base(remote)),
o.f.opt.Encoding.FromStandardName(path.Base(remote)),
in,
src.Size(),
src.ModTime(ctx),
Expand Down Expand Up @@ -981,7 +980,7 @@ func (o *Object) recoverFromTimeoutConflict(ctx context.Context, uploadErr error
return nil, uploadErr
}

baseName := filepath.Base(remote)
baseName := path.Base(remote)
encodedName := o.f.opt.Encoding.FromStandardName(baseName)

var meta *buckets.CreateMetaResponse
Expand All @@ -991,8 +990,8 @@ func (o *Object) recoverFromTimeoutConflict(ctx context.Context, uploadErr error
return o.f.shouldRetry(ctx, err)
}
if existingFile != nil {
name := strings.TrimSuffix(baseName, filepath.Ext(baseName))
ext := strings.TrimPrefix(filepath.Ext(baseName), ".")
name := strings.TrimSuffix(baseName, path.Ext(baseName))
ext := strings.TrimPrefix(path.Ext(baseName), ".")

meta = &buckets.CreateMetaResponse{
UUID: existingFile.UUID,
Expand Down
12 changes: 9 additions & 3 deletions backend/s3/provider/IONOS.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,17 @@ description: IONOS Cloud
region:
de: Frankfurt, Germany
eu-central-2: Berlin, Germany
eu-central-3: Berlin, Germany
eu-central-4: Frankfurt, Germany
eu-south-2: Logrono, Spain
us-central-1: Lenexa, USA
endpoint:
s3-eu-central-1.ionoscloud.com: Frankfurt, Germany
s3-eu-central-2.ionoscloud.com: Berlin, Germany
s3-eu-south-2.ionoscloud.com: Logrono, Spain
s3.eu-central-1.ionoscloud.com: Frankfurt, Germany
s3.eu-central-2.ionoscloud.com: Berlin, Germany
s3.eu-central-3.ionoscloud.com: Berlin, Germany
s3.eu-central-4.ionoscloud.com: Frankfurt, Germany
s3.eu-south-2.ionoscloud.com: Logrono, Spain
s3.us-central-1.ionoscloud.com: Lenexa, USA
acl: {}
bucket_acl: true
quirks:
Expand Down
4 changes: 2 additions & 2 deletions backend/s3/provider/Scaleway.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,11 @@ storage_class:
GLACIER: |-
Archived storage.
Prices are lower, but it needs to be restored first to be accessed.
Available in FR-PAR and NL-AMS regions.
Available in the FR-PAR region only.
ONEZONE_IA: |-
One Zone - Infrequent Access.
A good choice for storing secondary backup copies or easily re-creatable data.
Available in the FR-PAR region only.
Available in all regions.
bucket_acl: true
quirks:
max_upload_parts: 1000
2 changes: 1 addition & 1 deletion backend/webdav/webdav.go
Original file line number Diff line number Diff line change
Expand Up @@ -348,7 +348,7 @@ func (f *Fs) readMetaDataForPath(ctx context.Context, path string, depth string)
ExtraHeaders: map[string]string{
"Depth": depth,
},
NoRedirect: true,
CheckRedirect: rest.PreserveMethodRedirectFn,
}
if f.hasOCMD5 || f.hasOCSHA1 {
opts.Body = bytes.NewBuffer(owncloudProps)
Expand Down
13 changes: 9 additions & 4 deletions cmd/touch/touch.go
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,11 @@ time instead of the current time. Times may be specified as one of:
- 'YYYY-MM-DDTHH:MM:SS.SSS' - e.g. 2006-01-02T15:04:05.123456789

Note that value of ` + "`--timestamp`" + ` is in UTC. If you want local time
then add the ` + "`--localtime`" + ` flag.`,
then add the ` + "`--localtime`" + ` flag.

Metadata can be added when creating a new file with ` + "`--metadata-set`" + `.
For example:
rclone touch remote:path -M --metadata-set key=value`,
Annotations: map[string]string{
"versionIntroduced": "v1.39",
"groups": "Filter,Listing,Important",
Expand Down Expand Up @@ -123,10 +127,10 @@ func timeOfTouch() (time.Time, error) {
}

// createEmptyObject creates an empty object (file) with specified timestamp
func createEmptyObject(ctx context.Context, remote string, modTime time.Time, f fs.Fs) error {
func createEmptyObject(ctx context.Context, remote string, modTime time.Time, f fs.Fs, options []fs.OpenOption) error {
var buffer []byte
src := object.NewStaticObjectInfo(remote, modTime, int64(len(buffer)), true, nil, f)
_, err := f.Put(ctx, bytes.NewBuffer(buffer), src)
_, err := f.Put(ctx, bytes.NewBuffer(buffer), src, options...)
return err
}

Expand Down Expand Up @@ -163,7 +167,8 @@ func Touch(ctx context.Context, f fs.Fs, remote string) error {
return nil
}
fs.Debugf(f, "Touching (creating) %q", remote)
if err = createEmptyObject(ctx, remote, t, f); err != nil {
options := fs.MetadataAsOpenOptions(ctx)
if err = createEmptyObject(ctx, remote, t, f, options); err != nil {
return fmt.Errorf("failed to touch (create): %w", err)
}
}
Expand Down
29 changes: 29 additions & 0 deletions cmd/touch/touch_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -150,3 +150,32 @@ func TestRecursiveTouchDirWithFiles(t *testing.T) {
require.NoError(t, err)
fstest.CheckListingWithPrecision(t, r.Fremote, []fstest.Item{file1, file2, file3}, []string{"a", "a/b", "a/b/c"}, fs.ModTimeNotSupported)
}

// TestTouchWithMetadata verifies that touching a new file with
// --metadata-set applies the metadata to the created object. Skipped on
// backends without user metadata support.
func TestTouchWithMetadata(t *testing.T) {
	r := fstest.NewRun(t)
	ctx := context.Background()
	if !r.Fremote.Features().UserMetadata {
		t.Skip("Skipping metadata test; backend does not support user metadata")
	}

	// Flip the global config for the duration of the test and restore
	// it afterwards so other tests see the original values.
	ci := fs.GetConfig(ctx)
	savedMetadata := ci.Metadata
	savedMetadataSet := ci.MetadataSet
	t.Cleanup(func() {
		ci.Metadata = savedMetadata
		ci.MetadataSet = savedMetadataSet
	})

	ci.Metadata = true
	ci.MetadataSet = fs.Metadata{"testkey": "testvalue"}

	require.NoError(t, Touch(ctx, r.Fremote, "metaFile"))

	// The freshly created object must carry the requested metadata.
	obj, err := r.Fremote.NewObject(ctx, "metaFile")
	require.NoError(t, err)
	fstest.CheckEntryMetadata(ctx, t, r.Fremote, obj, ci.MetadataSet)
}
3 changes: 3 additions & 0 deletions docs/content/authors.md
Original file line number Diff line number Diff line change
Expand Up @@ -1082,3 +1082,6 @@ put them back in again. -->
- Shlomi Avihou <shlomi@zadarastorage.com>
- Chris <238498929+chris081519-crypto@users.noreply.github.com>
- Jan-Philipp Reßler <xodarap@xodarap.de>
- Dark Dragon <darkdragon-001@web.de>
- Leon Brocard <lbrocard@fastly.com>
- a1pcm <a1pcm@users.noreply.github.com>
4 changes: 4 additions & 0 deletions docs/content/docs.md
Original file line number Diff line number Diff line change
Expand Up @@ -2961,6 +2961,10 @@ knowing the local file is newer than the time it was last uploaded to the
remote is sufficient. In those cases, this flag can speed up the process and
reduce the number of API calls necessary.

This flag is only supported on certain backends and will be silently
ignored on unsupported backends. Supported backends include
`azureblob`, `oracleobjectstorage`, `s3`, `swift`.

Using this flag on a sync operation without also using `--update` would cause
all files modified at any time other than the last upload time to be uploaded
again, which is probably not what you want.
Expand Down
14 changes: 14 additions & 0 deletions docs/content/s3.md
Original file line number Diff line number Diff line change
Expand Up @@ -671,6 +671,20 @@ work with the SDK properly:
| . | . |
| .. | .. |

#### Important note about double slashes (`//`)

Object keys containing consecutive forward slashes (`//`) are **not supported** by rclone.

When rclone encounters an object key with `//` (e.g., `a//b`), it will normalize the path to use a single slash (e.g., `a/b`). This normalization can cause "object not found" errors when trying to access the original object, as rclone will look for the normalized path instead of the actual object key.

**Example:**
- Original S3 object key: `folder//file.txt`
- rclone interprets it as: `folder/file.txt`
- Result: "object not found" error when trying to access `folder//file.txt`

**Workaround:**
Avoid using consecutive forward slashes (`//`) in S3 object keys when using rclone. If you have existing objects with `//` in their keys, you will need to rename them to use single slashes or access them through other means.

### Multipart uploads

rclone supports multipart uploads with S3 which means that it can
Expand Down
2 changes: 1 addition & 1 deletion go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ require (
github.com/golang-jwt/jwt/v5 v5.3.1
github.com/google/uuid v1.6.0
github.com/hanwen/go-fuse/v2 v2.9.0
github.com/internxt/rclone-adapter v0.0.0-20260213125353-6f59c89fcb7c
github.com/internxt/rclone-adapter v0.0.0-20260220172730-613f4cc8b8fd
github.com/jcmturner/gokrb5/v8 v8.4.4
github.com/jlaffaye/ftp v0.2.1-0.20240918233326-1b970516f5d3
github.com/josephspurrier/goversioninfo v1.5.0
Expand Down
4 changes: 2 additions & 2 deletions go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -423,8 +423,8 @@ github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyf
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/internxt/rclone-adapter v0.0.0-20260213125353-6f59c89fcb7c h1:r+KtxPyrhsYeNbsfeqTfEM8xRdwgV6LuNhLZxpXecb4=
github.com/internxt/rclone-adapter v0.0.0-20260213125353-6f59c89fcb7c/go.mod h1:vdPya4AIcDjvng4ViaAzqjegJf0VHYpYHQguFx5xBp0=
github.com/internxt/rclone-adapter v0.0.0-20260220172730-613f4cc8b8fd h1:dSIuz2mpJAPQfhHYtG57D0qwSkgC/vQ69gHfeyQ4kxA=
github.com/internxt/rclone-adapter v0.0.0-20260220172730-613f4cc8b8fd/go.mod h1:vdPya4AIcDjvng4ViaAzqjegJf0VHYpYHQguFx5xBp0=
github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo=
Expand Down
2 changes: 1 addition & 1 deletion lib/http/middleware.go
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ func MiddlewareCORS(allowOrigin string) Middleware {

if allowOrigin != "" {
w.Header().Add("Access-Control-Allow-Origin", allowOrigin)
w.Header().Add("Access-Control-Allow-Headers", "authorization, Content-Type")
w.Header().Add("Access-Control-Allow-Headers", "Authorization, Content-Type, Depth, Destination, If, Lock-Token, Overwrite, TimeOut, Translate")
w.Header().Add("Access-Control-Allow-Methods", "COPY, DELETE, GET, HEAD, LOCK, MKCOL, MOVE, OPTIONS, POST, PROPFIND, PROPPATCH, PUT, TRACE, UNLOCK")
w.Header().Add("Access-Control-Max-Age", "86400")
}
Expand Down
Loading