// syncFnDryRunErrorPrefix is the message prefix shared by all sync function dry run errors.
const syncFnDryRunErrorPrefix = "Error returned from Sync Function"

// SyncFnDryRunError wraps an error raised while dry-running a sync function.
// The type is matchable with errors.As, and Unwrap exposes the original error
// so errors.Is can inspect the chain.
type SyncFnDryRunError struct {
	Err error // underlying sync function error; may be nil
}

// Error implements the error interface. A nil receiver or a nil wrapped error
// yields just the prefix; otherwise the wrapped error's message is appended.
func (e *SyncFnDryRunError) Error() string {
	if e == nil || e.Err == nil {
		return syncFnDryRunErrorPrefix
	}
	return syncFnDryRunErrorPrefix + ": " + e.Err.Error()
}

// Unwrap exposes the wrapped error to errors.Is/errors.As chains.
// Safe to call on a nil receiver.
func (e *SyncFnDryRunError) Unwrap() error {
	if e == nil {
		return nil
	}
	return e.Err
}
generation < 0 { - return nil, base.HTTPErrorf(http.StatusBadRequest, "Invalid revision ID"), nil - } - generation++ - - // Create newDoc which will be used to pass around Body - newDoc := &Document{ - ID: docID, - } - // Pull out attachments - newDoc.SetAttachments(GetBodyAttachments(body)) - delete(body, BodyAttachments) - - delete(body, BodyRevisions) +// SyncFnDryrun Runs the given document body through a sync function and returns expiry, channels doc was placed in, +// access map for users, roles, handler errors and sync fn exceptions. +// If syncFn is provided, it will be used instead of the one configured on the database. +func (db *DatabaseCollectionWithUser) SyncFnDryrun(ctx context.Context, newDoc, oldDoc *Document, syncFn string) (*channels.ChannelMapperOutput, error) { - err := validateAPIDocUpdate(body) - if err != nil { - return nil, err, nil - } - bodyWithoutInternalProps, wasStripped := StripInternalProperties(body) - canonicalBytesForRevID, err := base.JSONMarshalCanonical(bodyWithoutInternalProps) - if err != nil { - return nil, err, nil - } - - // We needed to keep _deleted around in the body until we generated a rev ID, but now we can ditch it. - _, isDeleted := body[BodyDeleted] - if isDeleted { - delete(body, BodyDeleted) - } - - // and now we can finally update the newDoc body to be without any special properties - newDoc.UpdateBody(body) - - // If no special properties were stripped and document wasn't deleted, the canonical bytes represent the current - // body. 
In this scenario, store canonical bytes as newDoc._rawBody - if !wasStripped && !isDeleted { - newDoc._rawBody = canonicalBytesForRevID - } - - newRev := CreateRevIDWithBytes(generation, matchRev, canonicalBytesForRevID) - newDoc.RevID = newRev mutableBody, metaMap, _, err := db.prepareSyncFn(oldDoc, newDoc) if err != nil { base.InfofCtx(ctx, base.KeyDiagnostic, "Failed to prepare to run sync function: %v", err) - return nil, err, nil + return nil, err } syncOptions, err := MakeUserCtx(db.user, db.ScopeName, db.Name) if err != nil { - return nil, err, nil + return nil, err + } + var output *channels.ChannelMapperOutput + var syncErr error + if syncFn == "" { + output, err = db.ChannelMapper.MapToChannelsAndAccess(ctx, mutableBody, string(oldDoc._rawBody), metaMap, syncOptions) + if err != nil { + return nil, &base.SyncFnDryRunError{Err: err} + } + } else { + jsTimeout := time.Duration(base.DefaultJavascriptTimeoutSecs) * time.Second + syncRunner, err := channels.NewSyncRunner(ctx, syncFn, jsTimeout) + if err != nil { + return nil, fmt.Errorf("failed to create sync runner: %v", err) + } + jsOutput, err := syncRunner.Call(ctx, mutableBody, string(oldDoc._rawBody), metaMap, syncOptions) + if err != nil { + + return nil, &base.SyncFnDryRunError{Err: err} + } + output = jsOutput.(*channels.ChannelMapperOutput) } - output, err := db.ChannelMapper.MapToChannelsAndAccess(ctx, mutableBody, string(oldDoc._rawBody), metaMap, syncOptions) - return output, nil, err + return output, syncErr } // revTreeConflictCheck checks for conflicts in the rev tree history and returns the parent revid, currentRevIndex diff --git a/docs/api/paths/diagnostic/keyspace-sync.yaml b/docs/api/paths/diagnostic/keyspace-sync.yaml index d50981ecba..55d705869a 100644 --- a/docs/api/paths/diagnostic/keyspace-sync.yaml +++ b/docs/api/paths/diagnostic/keyspace-sync.yaml @@ -7,16 +7,40 @@ # the file licenses/APL2.txt. 
parameters: - $ref: ../../components/parameters.yaml#/keyspace -get: + - $ref: ../../components/parameters.yaml#/doc_id +post: summary: Run a doc body through the sync function and return sync data. description: |- - Run a document body through the sync function and return document sync data. + Runs a document body through the sync function and returns document sync + data. If no custom sync function is provided in the request body, the + default or user-defined sync function for the collection is used. + | Document | DocID | Behaviour | + | -------- | ----- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | + | Yes | No | The document passed will be considered as newDoc and oldDoc will be empty | + | Yes | Yes | The document passed in the body will be newDoc and DocID will be read from the bucket/collection and will be passed as the oldDoc. If DocID doesn't exist, then oldDoc will be empty | + | No | No | Will throw an error | + | No | Yes | The docID will be passed in as the newDoc and oldDoc will be empty. If the document is not found, an error will be returned | + * Sync Gateway Application Read Only requestBody: content: application/json: schema: - $ref: ../../components/schemas.yaml#/Document + type: object + properties: + sync_function: + description: |- + A JavaScript function that defines custom access, channel, and + validation logic for documents. This function will be evaluated + by the Sync Gateway to determine document routing, access + grants, and validation outcomes during synchronization. 
// SyncFnDryRunPayload is the request body for the diagnostic sync function
// dry run endpoint (POST /{keyspace}/_sync).
type SyncFnDryRunPayload struct {
	// Function is an optional sync function that, when non-empty, is used
	// instead of the sync function configured on the database.
	Function string `json:"sync_function"`
	// Doc is the document body to run through the sync function.
	Doc db.Body `json:"doc,omitempty"`
}
the revision ID to match, and the new generation number: + matchRev, _ := syncDryRunPayload.Doc[db.BodyRev].(string) + generation, _ := db.ParseRevID(h.ctx(), matchRev) + if generation < 0 { + return base.HTTPErrorf(http.StatusBadRequest, "Invalid revision ID") + } + generation++ + + // Create newDoc which will be used to pass around Body + newDoc := &db.Document{ + ID: docid, } + // Pull attachments + newDoc.SetAttachments(db.GetBodyAttachments(syncDryRunPayload.Doc)) + delete(syncDryRunPayload.Doc, db.BodyAttachments) + delete(syncDryRunPayload.Doc, db.BodyRevisions) - output, err, syncFnErr := h.collection.SyncFnDryrun(h.ctx(), body, docid) + if _, ok := syncDryRunPayload.Doc[base.SyncPropertyName]; ok { + return base.HTTPErrorf(http.StatusBadRequest, "document-top level property '_sync' is a reserved internal property") + } + + db.StripInternalProperties(syncDryRunPayload.Doc) + + // We needed to keep _deleted around in the body until we generate rev ID, but now it can be removed + _, isDeleted := syncDryRunPayload.Doc[db.BodyDeleted] + if isDeleted { + delete(syncDryRunPayload.Doc, db.BodyDeleted) + } + + //update the newDoc body to be without any special properties + newDoc.UpdateBody(syncDryRunPayload.Doc) + + rawDocBytes, err := newDoc.BodyBytes(h.ctx()) if err != nil { - return err + return base.HTTPErrorf(http.StatusBadRequest, "Error marshalling document: %v", err) } - if syncFnErr != nil { + + newRev := db.CreateRevIDWithBytes(generation, matchRev, rawDocBytes) + newDoc.RevID = newRev + + output, err := h.collection.SyncFnDryrun(h.ctx(), newDoc, oldDoc, syncDryRunPayload.Function) + if err != nil { + var syncFnDryRunErr *base.SyncFnDryRunError + if !errors.As(err, &syncFnDryRunErr) { + return err + } + + errMsg := syncFnDryRunErr.Error() resp := SyncFnDryRun{ - Exception: syncFnErr.Error(), + Exception: errMsg, } h.writeJSON(resp) return nil diff --git a/rest/diagnostic_doc_api_test.go b/rest/diagnostic_doc_api_test.go index 183039bd23..6d615e4226 
100644 --- a/rest/diagnostic_doc_api_test.go +++ b/rest/diagnostic_doc_api_test.go @@ -72,80 +72,18 @@ func TestGetDocDryRuns(t *testing.T) { defer rt.Close() bucket := rt.Bucket().GetName() ImportFilter := `"function(doc) { if (doc.user.num) { return true; } else { return false; } }"` - SyncFn := `"function(doc) {channel(doc.channel); access(doc.accessUser, doc.accessChannel); role(doc.accessUser, doc.role); expiry(doc.expiry);}"` - resp := rt.SendAdminRequest("PUT", "/db/", fmt.Sprintf( - `{"bucket":"%s", "num_index_replicas": 0, "enable_shared_bucket_access": %t, "sync":%s, "import_filter":%s}`, - bucket, base.TestUseXattrs(), SyncFn, ImportFilter)) - RequireStatus(t, resp, http.StatusCreated) - response := rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_sync", `{"accessChannel": ["dynamicChan5412"],"accessUser": "user","channel": ["dynamicChan222"],"expiry":10}`) - RequireStatus(t, response, http.StatusOK) - var respMap SyncFnDryRun - var respMapinit SyncFnDryRun - err := json.Unmarshal(response.BodyBytes(), &respMapinit) - assert.NoError(t, err) - assert.ElementsMatch(t, respMapinit.Exception, nil) - assert.Equal(t, respMapinit.Roles, channels.AccessMap{}) - assert.Equal(t, respMapinit.Access, channels.AccessMap{"user": channels.BaseSetOf(t, "dynamicChan5412")}) - assert.EqualValues(t, *respMapinit.Expiry, 10) - response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_sync", `{"role": ["role:role1"], "accessUser": "user"}`) - RequireStatus(t, response, http.StatusOK) - - err = json.Unmarshal(response.BodyBytes(), &respMap) - assert.NoError(t, err) - assert.Equal(t, respMap.Roles, channels.AccessMap{"user": channels.BaseSetOf(t, "role1")}) newSyncFn := `"function(doc,oldDoc){if (doc.user.num >= 100) {channel(doc.channel);} else {throw({forbidden: 'user num too low'});}if (oldDoc){ console.log(oldDoc); if (oldDoc.user.num > doc.user.num) { access(oldDoc.user.name, doc.channel);} else {access(doc.user.name[0], doc.channel);}}}"` - resp = 
rt.SendAdminRequest("POST", "/db/_config", fmt.Sprintf( + resp := rt.SendAdminRequest("PUT", "/db/", fmt.Sprintf( `{"bucket":"%s", "num_index_replicas": 0, "enable_shared_bucket_access": %t, "sync":%s, "import_filter":%s}`, bucket, base.TestUseXattrs(), newSyncFn, ImportFilter)) RequireStatus(t, resp, http.StatusCreated) - _ = rt.PutDoc("doc", `{"user":{"num":123, "name":["user1"]}, "channel":"channel1"}`) - - response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_sync", `{"user":{"num":23}}`) - RequireStatus(t, response, http.StatusOK) - - err = json.Unmarshal(response.BodyBytes(), &respMap) - assert.NoError(t, err) - assert.Equal(t, respMap.Exception, "403 user num too low") - - response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_sync?doc_id=doc", `{"user":{"num":150}, "channel":"abc"}`) - RequireStatus(t, response, http.StatusOK) - var newrespMap SyncFnDryRun - err = json.Unmarshal(response.BodyBytes(), &newrespMap) - assert.NoError(t, err) - assert.Equal(t, newrespMap.Exception, "TypeError: Cannot access member '0' of undefined") - - response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_sync?doc_id=doc", `{"user":{"num":120, "name":["user2"]}, "channel":"channel2"}`) - RequireStatus(t, response, http.StatusOK) - - err = json.Unmarshal(response.BodyBytes(), &respMap) - assert.NoError(t, err) - assert.Equal(t, respMap.Access, channels.AccessMap{"user1": channels.BaseSetOf(t, "channel2")}) - - // get doc from bucket with no body provided - response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_sync?doc_id=doc", ``) - RequireStatus(t, response, http.StatusOK) - err = json.Unmarshal(response.BodyBytes(), &respMap) - assert.NoError(t, err) - assert.Equal(t, respMap.Access, channels.AccessMap{"user1": channels.BaseSetOf(t, "channel1")}) - - // Get doc that doesnt exist, will error - response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_sync?doc_id=doc404", ``) - RequireStatus(t, response, http.StatusNotFound) - err = 
json.Unmarshal(response.BodyBytes(), &respMap) - assert.NoError(t, err) - assert.Equal(t, respMap.Exception, "") - - // no doc id no body, will error - response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_sync?doc_id=", ``) - RequireStatus(t, response, http.StatusInternalServerError) - // Import filter import=false and type error - response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_import_filter", `{"accessUser": "user"}`) + response := rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_import_filter", `{"accessUser": "user"}`) RequireStatus(t, response, http.StatusOK) var respMap2 ImportFilterDryRun - err = json.Unmarshal(response.BodyBytes(), &respMap2) + err := json.Unmarshal(response.BodyBytes(), &respMap2) assert.NoError(t, err) assert.Equal(t, respMap2.Error, "TypeError: Cannot access member 'num' of undefined") assert.False(t, respMap2.ShouldImport) @@ -194,25 +132,6 @@ func TestGetDocDryRuns(t *testing.T) { response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_import_filter?doc_id=doc2", `{"user":{"num":23}}`) RequireStatus(t, response, http.StatusBadRequest) - newSyncFn = `"function(doc,oldDoc){if(oldDoc){ channel(oldDoc.channel)} else {channel(doc.channel)} }"` - resp = rt.SendAdminRequest("POST", "/db/_config", fmt.Sprintf( - `{"bucket":"%s", "num_index_replicas": 0, "enable_shared_bucket_access": %t, "sync":%s, "import_filter":%s}`, - bucket, base.TestUseXattrs(), newSyncFn, ImportFilter)) - RequireStatus(t, resp, http.StatusCreated) - _ = rt.PutDoc("doc22", `{"chan1":"channel1", "channel":"chanOld"}`) - - response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_sync", `{"channel":"channel2"}`) - RequireStatus(t, response, http.StatusOK) - - err = json.Unmarshal(response.BodyBytes(), &respMap) - assert.NoError(t, err) - assert.EqualValues(t, respMap.Channels.ToArray(), []string{"channel2"}) - - response = rt.SendDiagnosticRequest("GET", "/{{.keyspace}}/_sync?doc_id=doc22", `{"channel":"chanNew"}`) - RequireStatus(t, response, 
http.StatusOK) - err = json.Unmarshal(response.BodyBytes(), &newrespMap) - assert.NoError(t, err) - assert.EqualValues(t, newrespMap.Channels.ToArray(), []string{"chanOld"}) } func TestGetUserDocAccessSpan(t *testing.T) { @@ -1079,3 +998,257 @@ func TestGetUserDocAccessDuplicates(t *testing.T) { RequireStatus(rt.TB(), response, http.StatusOK) require.JSONEq(rt.TB(), rt.mustTemplateResource(expectedOutput), response.BodyString()) } + +// Tests the Diagnostic Endpoint to dry run Sync Function +func TestSyncFuncDryRun(t *testing.T) { + base.SkipImportTestsIfNotEnabled(t) + base.SetUpTestLogging(t, base.LevelDebug, base.KeyAll) + + tests := []struct { + name string + dbSyncFunction string + syncFunction string + document any + docID string + existingDoc bool + existingDocID string + existingDocBody string + expectedOutput SyncFnDryRun + expectedStatus int + }{ + { + name: "custom_sync_func-db_sync_func-doc_body-no_doc_id-no_existing_doc", + dbSyncFunction: "function(doc) {channel(doc.channel); access(doc.accessUser, doc.accessChannel); role(doc.accessUser, doc.role); expiry(doc.expiry);}", + syncFunction: "function(doc) {channel(doc.channel); access(doc.accessUser, doc.accessChannel); role(doc.accessUser, doc.role); expiry(doc.expiry);}", + document: map[string]any{"accessChannel": []string{"dynamicChan5412"}, "accessUser": "user", "channel": []string{"dynamicChan222"}, "expiry": 10}, + existingDoc: false, + expectedOutput: SyncFnDryRun{ + Channels: base.SetFromArray([]string{"dynamicChan222"}), + Access: channels.AccessMap{"user": channels.BaseSetOf(t, "dynamicChan5412")}, + Roles: channels.AccessMap{}, + Expiry: base.Ptr(uint32(10)), + }, + expectedStatus: http.StatusOK, + }, + { + name: "custom_sync_func-db_sync_func-doc_body-no_doc_id-no_existing_doc-role", + dbSyncFunction: "function(doc) {channel(doc.channel); access(doc.accessUser, doc.accessChannel); role(doc.accessUser, doc.role); expiry(doc.expiry);}", + syncFunction: "function(doc) {channel(doc.channel); 
access(doc.accessUser, doc.accessChannel); role(doc.accessUser, doc.role); expiry(doc.expiry);}", + document: map[string]any{"role": []string{"role:role1"}, "accessUser": "user"}, + existingDoc: false, + expectedOutput: SyncFnDryRun{ + Channels: base.SetFromArray([]string{}), + Access: channels.AccessMap{}, + Roles: channels.AccessMap{"user": channels.BaseSetOf(t, "role1")}, + }, + expectedStatus: http.StatusOK, + }, + { + name: "custom_sync_func-no_db_sync_func-doc_body-no_existing_doc-no_doc_id", + dbSyncFunction: "", + syncFunction: "function(doc) {channel(doc.channel); access(doc.accessUser, doc.accessChannel); role(doc.accessUser, doc.role); expiry(doc.expiry);}", + document: map[string]any{"accessChannel": []string{"dynamicChan5412"}, "accessUser": "user", "channel": []string{"dynamicChan222"}, "expiry": 10}, + existingDoc: false, + expectedOutput: SyncFnDryRun{ + Channels: base.SetFromArray([]string{"dynamicChan222"}), + Access: channels.AccessMap{"user": channels.BaseSetOf(t, "dynamicChan5412")}, + Roles: channels.AccessMap{}, + Expiry: base.Ptr(uint32(10)), + }, + expectedStatus: http.StatusOK, + }, + { + name: "no_custom_sync_func-db_sync_func-doc_body-no_existing_doc-no_doc_id", + dbSyncFunction: "function(doc) {channel(doc.channel); access(doc.accessUser, doc.accessChannel); role(doc.accessUser, doc.role); expiry(doc.expiry);}", + syncFunction: "", + document: map[string]any{"accessChannel": []string{"dynamicChan5412"}, "accessUser": "user", "channel": []string{"dynamicChan222"}, "expiry": 10}, + existingDoc: false, + expectedOutput: SyncFnDryRun{ + Channels: base.SetFromArray([]string{"dynamicChan222"}), + Access: channels.AccessMap{"user": channels.BaseSetOf(t, "dynamicChan5412")}, + Roles: channels.AccessMap{}, + Expiry: base.Ptr(uint32(10)), + }, + expectedStatus: http.StatusOK, + }, + { + name: "custom_sync_func-db_sync_func-doc_body-existing_doc-no_doc_id", + dbSyncFunction: "function(doc) {channel(doc.channel); access(doc.accessUser, 
doc.accessChannel); role(doc.accessUser, doc.role); expiry(doc.expiry);}", + syncFunction: "function(doc) {channel(doc.channel); access(doc.accessUser, doc.accessChannel); role(doc.accessUser, doc.role); expiry(doc.expiry);}", + document: map[string]any{"accessChannel": []string{"dynamicChan5412"}, "accessUser": "user", "channel": []string{"dynamicChan222"}, "expiry": 10}, + existingDoc: true, + existingDocID: "doc1", + existingDocBody: `{"accessChannel": ["dynamicChan5412"],"accessUser": "user","channel": ["dynamicChan222"],"expiry":10}`, + expectedOutput: SyncFnDryRun{ + Channels: base.SetFromArray([]string{"dynamicChan222"}), + Access: channels.AccessMap{"user": channels.BaseSetOf(t, "dynamicChan5412")}, + Roles: channels.AccessMap{}, + Expiry: base.Ptr(uint32(10)), + }, + expectedStatus: http.StatusOK, + }, + { + name: "no_custom_sync_func-db_sync_func-doc_body-no_existing_doc-no_doc_id-sync_func_exception", + dbSyncFunction: "function(doc,oldDoc){if (doc.user.num >= 100) {channel(doc.channel);} else {throw({forbidden: 'user num too low'});}if (oldDoc){ console.log(oldDoc); if (oldDoc.user.num > doc.user.num) { access(oldDoc.user.name, doc.channel);} else {access(doc.user.name[0], doc.channel);}}}", + syncFunction: "", + document: map[string]any{"user": map[string]any{"num": 23}}, + existingDoc: false, + expectedOutput: SyncFnDryRun{ + Channels: base.SetFromArray([]string{}), + Access: channels.AccessMap{}, + Roles: channels.AccessMap{}, + Exception: "403 user num too low", + }, + expectedStatus: http.StatusOK, + }, + { + name: "no_custom_sync_func-db_sync_func-doc_body-no_existing_doc-doc_id-sync_func_exception_typeError", + dbSyncFunction: "function(doc,oldDoc){if (doc.user.num >= 100) {channel(doc.channel);} else {throw({forbidden: 'user num too low'});}if (oldDoc){ console.log(oldDoc); if (oldDoc.user.num > doc.user.num) { access(oldDoc.user.name, doc.channel);} else {access(doc.user.name[0], doc.channel);}}}", + syncFunction: "", + document: 
map[string]any{"user": map[string]any{"num": 150}, "channel": "abc"}, + docID: "doc", + existingDoc: true, + existingDocID: "doc", + existingDocBody: `{"user":{"num":123, "name":["user1"]}, "channel":"channel1"}`, + expectedOutput: SyncFnDryRun{ + Exception: "Error returned from Sync Function: TypeError: Cannot access member '0' of undefined", + }, + expectedStatus: http.StatusOK, + }, + { + name: "no_custom_sync_func-db_sync_func-doc_body-existing_doc-doc_id", + dbSyncFunction: "function(doc,oldDoc){if (doc.user.num >= 100) {channel(doc.channel);} else {throw({forbidden: 'user num too low'});}if (oldDoc){ console.log(oldDoc); if (oldDoc.user.num > doc.user.num) { access(oldDoc.user.name, doc.channel);} else {access(doc.user.name[0], doc.channel);}}}", + syncFunction: "", + document: map[string]any{"user": map[string]any{"num": 120, "name": []string{"user2"}}, "channel": "channel2"}, + docID: "doc", + existingDoc: true, + existingDocID: "doc", + existingDocBody: `{"user":{"num":123, "name":["user1"]}, "channel":"channel1"}`, + expectedOutput: SyncFnDryRun{ + Channels: base.SetFromArray([]string{"channel2"}), + Access: channels.AccessMap{"user1": channels.BaseSetOf(t, "channel2")}, + Roles: channels.AccessMap{}, + Exception: "", + }, + expectedStatus: http.StatusOK, + }, + { + name: "no_custom_sync_func-db_sync_func-no_doc_body-existing_doc-doc_id", + dbSyncFunction: "function(doc,oldDoc){if (doc.user.num >= 100) {channel(doc.channel);} else {throw({forbidden: 'user num too low'});}if (oldDoc){ console.log(oldDoc); if (oldDoc.user.num > doc.user.num) { access(oldDoc.user.name, doc.channel);} else {access(doc.user.name[0], doc.channel);}}}", + syncFunction: "", + docID: "doc", + existingDoc: true, + existingDocID: "doc", + existingDocBody: `{"user":{"num":123, "name":["user1"]}, "channel":"channel1"}`, + expectedOutput: SyncFnDryRun{ + Channels: base.SetFromArray([]string{"channel1"}), + Access: channels.AccessMap{}, + Roles: channels.AccessMap{}, + Exception: "", + 
}, + expectedStatus: http.StatusOK, + }, + { + name: "no_custom_sync_func-db_sync_func-no_doc_body-existing_doc-invalid_doc_id", + dbSyncFunction: "function(doc,oldDoc){if (doc.user.num >= 100) {channel(doc.channel);} else {throw({forbidden: 'user num too low'});}if (oldDoc){ console.log(oldDoc); if (oldDoc.user.num > doc.user.num) { access(oldDoc.user.name, doc.channel);} else {access(doc.user.name[0], doc.channel);}}}", + syncFunction: "", + docID: "doc404", + existingDoc: true, + existingDocID: "doc", + existingDocBody: `{"user":{"num":123, "name":["user1"]}, "channel":"channel1"}`, + expectedOutput: SyncFnDryRun{}, + expectedStatus: http.StatusNotFound, + }, + { + name: "no_custom_sync_func-db_sync_func-no_doc_body-existing_doc-no_doc_id", + dbSyncFunction: "function(doc,oldDoc){if (doc.user.num >= 100) {channel(doc.channel);} else {throw({forbidden: 'user num too low'});}if (oldDoc){ console.log(oldDoc); if (oldDoc.user.num > doc.user.num) { access(oldDoc.user.name, doc.channel);} else {access(doc.user.name[0], doc.channel);}}}", + syncFunction: "", + docID: "", + existingDoc: true, + existingDocID: "doc", + existingDocBody: `{"user":{"num":123, "name":["user1"]}, "channel":"channel1"}`, + expectedOutput: SyncFnDryRun{}, + expectedStatus: http.StatusBadRequest, + }, + { + name: "no_custom_sync_func-db_sync_func-doc_body-existing_doc-no_doc_id-new_doc_channel", + dbSyncFunction: "function(doc,oldDoc){if(oldDoc){ channel(oldDoc.channel)} else {channel(doc.channel)} }", + syncFunction: "", + document: map[string]any{"channel": "channel2"}, + docID: "", + existingDoc: true, + existingDocID: "doc22", + existingDocBody: `{"chan1":"channel1", "channel":"chanOld"}`, + expectedOutput: SyncFnDryRun{ + Channels: base.SetFromArray([]string{"channel2"}), + Access: channels.AccessMap{}, + Roles: channels.AccessMap{}, + }, + expectedStatus: http.StatusOK, + }, + { + name: "no_custom_sync_func-db_sync_func-doc_body-existing_doc-doc_id-old_doc_channel", + dbSyncFunction: 
"function(doc,oldDoc){if(oldDoc){ channel(oldDoc.channel)} else {channel(doc.channel)} }", + syncFunction: "", + document: map[string]any{"channel": "chanNew"}, + docID: "doc22", + existingDoc: true, + existingDocID: "doc22", + existingDocBody: `{"chan1":"channel1", "channel":"chanOld"}`, + expectedOutput: SyncFnDryRun{ + Channels: base.SetFromArray([]string{"chanOld"}), + Access: channels.AccessMap{}, + Roles: channels.AccessMap{}, + }, + expectedStatus: http.StatusOK, + }, + { + name: "no_custom_sync_func-db_sync_func-invalid_doc_body-existing_doc-doc_id", + dbSyncFunction: "function(doc,oldDoc){if(oldDoc){ channel(oldDoc.channel)} else {channel(doc.channel)} }", + syncFunction: "", + document: `{"channel": "chanNew", "oldchannel":}`, + docID: "doc22", + existingDoc: true, + existingDocID: "doc22", + existingDocBody: `{"chan1":"channel1", "channel":"chanOld"}`, + expectedOutput: SyncFnDryRun{}, + expectedStatus: http.StatusBadRequest, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + rt := NewRestTester(t, &RestTesterConfig{ + PersistentConfig: true, + SyncFn: test.dbSyncFunction, + }) + defer rt.Close() + + RequireStatus(t, rt.CreateDatabase("db", rt.NewDbConfig()), http.StatusCreated) + + url := "/{{.keyspace}}/_sync" + if test.existingDoc { + ver := rt.PutDoc(test.existingDocID, test.existingDocBody) + rt.WaitForVersion(test.existingDocID, ver) + } + if test.docID != "" { + url += "?doc_id=" + test.docID + } + bodyMap := make(map[string]interface{}) + if test.syncFunction != "" { + bodyMap["sync_function"] = test.syncFunction + } + if test.document != nil { + bodyMap["doc"] = test.document + } + bodyBytes, _ := json.Marshal(bodyMap) + resp := rt.SendDiagnosticRequest("POST", url, string(bodyBytes)) + RequireStatus(t, resp, test.expectedStatus) + + var output SyncFnDryRun + err := json.Unmarshal(resp.Body.Bytes(), &output) + assert.NoError(t, err) + assert.Equal(t, test.expectedOutput, output) + }) + } +} diff --git 
a/rest/routing.go b/rest/routing.go index 6504ddcc74..f93be27c1f 100644 --- a/rest/routing.go +++ b/rest/routing.go @@ -412,7 +412,7 @@ func createDiagnosticRouter(sc *ServerContext) *mux.Router { keyspace.StrictSlash(true) keyspace.Handle("/{docid:"+docRegex+"}/_all_channels", makeHandler(sc, adminPrivs, []Permission{PermReadAppData}, nil, (*handler).handleGetDocChannels)).Methods("GET") keyspace.Handle("/_user/{name}", makeHandler(sc, adminPrivs, []Permission{PermReadAppData}, nil, (*handler).handleGetUserDocAccessSpan)).Methods("GET") - keyspace.Handle("/_sync", makeHandler(sc, adminPrivs, []Permission{PermReadAppData}, nil, (*handler).handleSyncFnDryRun)).Methods("GET") + keyspace.Handle("/_sync", makeHandler(sc, adminPrivs, []Permission{PermReadAppData}, nil, (*handler).handleSyncFnDryRun)).Methods("POST") keyspace.Handle("/_import_filter", makeHandler(sc, adminPrivs, []Permission{PermReadAppData}, nil, (*handler).handleImportFilterDryRun)).Methods("GET") dbr.Handle("/_user/{name}/_all_channels", makeHandler(sc, adminPrivs, []Permission{PermReadPrincipal}, nil, (*handler).handleGetAllChannels)).Methods("GET")