diff --git a/.github/workflows/verify_python.sh b/.github/workflows/verify_python.sh
index 863ecd43f..f2f0ad492 100755
--- a/.github/workflows/verify_python.sh
+++ b/.github/workflows/verify_python.sh
@@ -5,6 +5,7 @@ source venv/bin/activate
pip install mypy
pip install pytest
pip install types-requests
+pip install types-Deprecated
pip install ./client
echo "Checking tests files..."
python -m mypy tests --exclude=venv --ignore-missing-imports
diff --git a/client/src/cbltest/api/syncgateway.py b/client/src/cbltest/api/syncgateway.py
index 0fa3c6d5d..ff4fa9e33 100644
--- a/client/src/cbltest/api/syncgateway.py
+++ b/client/src/cbltest/api/syncgateway.py
@@ -14,8 +14,11 @@
from cbltest.assertions import _assert_not_null
from cbltest.httplog import get_next_writer
from cbltest.jsonhelper import _get_typed_required
-from cbltest.logging import cbl_warning
+from cbltest.logging import cbl_warning, cbl_info
from cbltest.version import VERSION
+from cbltest.utils import assert_not_null
+
+from deprecated import deprecated
class _CollectionMap(JSONSerializable):
@@ -172,12 +175,38 @@ class DocumentUpdateEntry(JSONSerializable):
For creating a new document, set revid to None.
"""
+ @property
+ @deprecated("Only should be used until 4.0 SGW gets close to GA")
+ def id(self) -> str:
+ """
+ Gets the ID of the entry (NOTE: Will go away after 4.0 SGW gets close to GA)
+ """
+ return cast(str, self.__body["_id"])
+
+ @property
+ @deprecated("Only should be used until 4.0 SGW gets close to GA")
+ def rev(self) -> Optional[str]:
+ """
+ Gets the rev ID of the entry (NOTE: Will go away after 4.0 SGW gets close to GA)
+ """
+        if "_rev" not in self.__body:
+ return None
+
+ return cast(str, self.__body["_rev"])
+
def __init__(self, id: str, revid: Optional[str], body: dict):
self.__body = body.copy()
self.__body["_id"] = id
if revid:
self.__body["_rev"] = revid
+ @deprecated("Only should be used until 4.0 SGW gets close to GA")
+ def swap_rev(self, revid: str) -> None:
+ """
+ Changes the revid to the provided one (NOTE: Will go away after 4.0 SGW gets close to GA)
+ """
+ self.__body["_rev"] = revid
+
def to_json(self) -> Any:
return self.__body
@@ -193,14 +222,28 @@ def id(self) -> str:
return self.__id
@property
- def revid(self) -> str:
+ def revid(self) -> Optional[str]:
"""Gets the revision ID of the document"""
return self.__rev
+
+ @property
+ def cv(self) -> Optional[str]:
+ """Gets the CV of the document"""
+ return self.__cv
@property
def body(self) -> dict:
"""Gets the body of the document"""
return self.__body
+
+ @property
+ def revision(self) -> str:
+ """Gets either the CV (preferred) or revid of the document"""
+ if self.__cv is not None:
+ return self.__cv
+
+ assert self.__rev is not None
+ return self.__rev
def __init__(self, body: dict) -> None:
if "error" in body:
@@ -208,14 +251,18 @@ def __init__(self, body: dict) -> None:
self.__body = body.copy()
self.__id = cast(str, body["_id"])
- self.__rev = cast(str, body["_rev"])
+ self.__rev = cast(str, body["_rev"]) if "_rev" in body else None
+ self.__cv = cast(str, body["_cv"]) if "_cv" in body else None
del self.__body["_id"]
-        del self.__body["_rev"]
+        if self.__rev is not None: del self.__body["_rev"]
+ if self.__cv is not None:
+ del self.__body["_cv"]
def to_json(self) -> Any:
ret_val = self.__body.copy()
ret_val["_id"] = self.__id
-        ret_val["_rev"] = self.__rev
+        if self.__rev is not None: ret_val["_rev"] = self.__rev
+        if self.__cv is not None: ret_val["_cv"] = self.__cv
return ret_val
@@ -339,6 +386,19 @@ def replication_url(self, db_name: str):
"""
_assert_not_null(db_name, nameof(db_name))
return urljoin(self.__replication_url, db_name)
+
+ async def _put_database(self, db_name: str, payload: PutDatabasePayload, retry_count: int = 0) -> None:
+ with self.__tracer.start_as_current_span("put_database",
+ attributes={"cbl.database.name": db_name}) as current_span:
+ try:
+ await self._send_request("put", f"/{db_name}/", payload)
+ except CblSyncGatewayBadResponseError as e:
+ if e.code == 500 and retry_count < 10:
+ cbl_warning(f"Sync gateway returned 500 from PUT database call, retrying ({retry_count + 1})...")
+ current_span.add_event("SGW returned 500, retry")
+ await self._put_database(db_name, payload, retry_count + 1)
+ else:
+ raise
async def put_database(self, db_name: str, payload: PutDatabasePayload) -> None:
"""
@@ -347,17 +407,7 @@ async def put_database(self, db_name: str, payload: PutDatabasePayload) -> None:
:param db_name: The name of the DB to create
:param payload: The options for the DB to create
"""
- with self.__tracer.start_as_current_span("put_database",
- attributes={"cbl.database.name": db_name}) as current_span:
- try:
- await self._send_request("put", f"/{db_name}", payload)
- except CblSyncGatewayBadResponseError as e:
- if e.code == 500:
- cbl_warning("Sync gateway returned 500 from PUT database call, retrying...")
- current_span.add_event("SGW returned 500, retry")
- await self.put_database(db_name, payload)
- else:
- raise
+ await self._put_database(db_name, payload, 0)
async def delete_database(self, db_name: str) -> None:
"""
@@ -370,7 +420,7 @@ async def delete_database(self, db_name: str) -> None:
:param db_name: The name of the Database to delete
"""
with self.__tracer.start_as_current_span("delete_database", attributes={"cbl.database.name": db_name}):
- await self._send_request("delete", f"/{db_name}")
+ await self._send_request("delete", f"/{db_name}/")
def create_collection_access_dict(self, input: Dict[str, List[str]]) -> dict:
"""
@@ -527,6 +577,28 @@ async def get_all_documents(self, db_name: str, scope: str = "_default",
resp = await self._send_request("get", f"/{db_name}.{scope}.{collection}/_all_docs?show_cv=true")
assert isinstance(resp, dict)
return AllDocumentsResponse(cast(dict, resp))
+
+ @deprecated("Only should be used until 4.0 SGW gets close to GA")
+ async def _rewrite_rev_ids(self, db_name: str, updates: List[DocumentUpdateEntry],
+ scope: str, collection: str) -> None:
+ all_docs_body = list(u.id for u in updates if u.rev is not None)
+ all_docs_response = await self._send_request("post", f"/{db_name}.{scope}.{collection}/_all_docs",
+ JSONDictionary({"keys": all_docs_body}))
+
+ if not isinstance(all_docs_response, dict):
+ raise ValueError("Inappropriate response from sync gateway _all_docs (not JSON dict)")
+
+ rows = cast(dict, all_docs_response)["rows"]
+ if not isinstance(rows, list):
+ raise ValueError("Inappropriate response from sync gateway _all_docs (rows not a list)")
+
+ for r in cast(list, rows):
+ next_id = r["id"]
+ found = assert_not_null(next((u for u in updates if u.id == next_id), None),
+ f"Unable to find {next_id} in updates!")
+ new_rev_id = r["value"]["rev"]
+ cbl_info(f"For document {found.id}: Swapping revid from {found.rev} to {new_rev_id}")
+ found.swap_rev(new_rev_id)
async def update_documents(self, db_name: str, updates: List[DocumentUpdateEntry],
scope: str = "_default", collection: str = "_default") -> None:
@@ -541,12 +613,26 @@ async def update_documents(self, db_name: str, updates: List[DocumentUpdateEntry
with self.__tracer.start_as_current_span("update_documents", attributes={"cbl.database.name": db_name,
"cbl.scope.name": scope,
"cbl.collection.name": collection}):
+
+ await self._rewrite_rev_ids(db_name, updates, scope, collection)
+
+
+
body = {
"docs": list(u.to_json() for u in updates)
}
await self._send_request("post", f"/{db_name}.{scope}.{collection}/_bulk_docs",
JSONDictionary(body))
+
+ @deprecated("Only should be used until 4.0 SGW gets close to GA")
+ async def _replaced_revid(self, doc_id: str, revid: str, db_name: str, scope: str, collection: str) -> str:
+ response = await self._send_request("get", f"/{db_name}.{scope}.{collection}/{doc_id}?show_cv=true")
+ assert isinstance(response, dict)
+ response_dict = cast(dict, response)
+        assert revid == response_dict.get("_cv") or revid == response_dict.get("_rev")
+ return cast(dict, response)["_rev"]
+
async def delete_document(self, doc_id: str, revid: str, db_name: str, scope: str = "_default",
collection: str = "_default") -> None:
@@ -563,8 +649,13 @@ async def delete_document(self, doc_id: str, revid: str, db_name: str, scope: st
"cbl.scope.name": scope,
"cbl.collection.name": collection,
"cbl.document.id": doc_id}):
+ if "@" in revid:
+ new_rev_id = await self._replaced_revid(doc_id, revid, db_name, scope, collection)
+ else:
+ new_rev_id = revid
+
await self._send_request("delete", f"/{db_name}.{scope}.{collection}/{doc_id}",
- params={"rev": revid})
+ params={"rev": new_rev_id})
async def purge_document(self, doc_id: str, db_name: str, scope: str = "_default",
collection: str = "_default") -> None:
@@ -601,7 +692,7 @@ async def get_document(self, db_name: str, doc_id: str, scope: str = "_default",
"cbl.scope.name": scope,
"cbl.collection.name": collection,
"cbl.document.id": doc_id}):
- response = await self._send_request("get", f"/{db_name}.{scope}.{collection}/{doc_id}")
+ response = await self._send_request("get", f"/{db_name}.{scope}.{collection}/{doc_id}?show_cv=true")
if not isinstance(response, dict):
raise ValueError("Inappropriate response from sync gateway get /doc (not JSON)")
diff --git a/client/src/cbltest/utils.py b/client/src/cbltest/utils.py
index 0a9ec50f6..62eb33d32 100644
--- a/client/src/cbltest/utils.py
+++ b/client/src/cbltest/utils.py
@@ -1,5 +1,5 @@
import time
-from typing import Any, Callable, Dict, TypeVar, Type, Union
+from typing import Any, Callable, Dict, TypeVar, Type, Union, Optional, cast
from .api.error import CblTimeoutError
@@ -27,3 +27,7 @@ def _try_n_times(num_times: int,
print(f"Trying {func.__name__} failed (reason='{e}')")
raise CblTimeoutError(f"Failed to call {func.__name__} after {num_times} attempts!")
+
+def assert_not_null(value: Optional[T], msg: str) -> T:
+    assert value is not None, msg
+    return cast(T, value)
diff --git a/environment/sg/Dockerfile b/environment/sg/Dockerfile
index 673eabb4c..7be1728ff 100644
--- a/environment/sg/Dockerfile
+++ b/environment/sg/Dockerfile
@@ -4,9 +4,6 @@ SHELL ["/bin/bash", "-c"]
RUN apt -yqq update
RUN apt -yqq install curl systemctl
RUN mkdir -p /opt/sg
-COPY cert /opt/sg/cert
-COPY config /opt/sg/config
-COPY start-sgw.sh /opt/sg
ARG SG_DEB
RUN [ -z "$SG_DEB" ] && echo "SG_DEB is required" && exit 1 || true
@@ -20,9 +17,12 @@ RUN ARCHITECTURE="$(dpkg --print-architecture)" && \
exit 1; \
fi
-WORKDIR /opt/sg
-RUN dpkg -i ./couchbase-sync-gateway.deb
+RUN dpkg -i /opt/sg/couchbase-sync-gateway.deb
+COPY cert /opt/sg/cert
+COPY config /opt/sg/config
+COPY start-sgw.sh /opt/sg
+WORKDIR /opt/sg
EXPOSE 4984
EXPOSE 4985
diff --git a/environment/sg/config/bootstrap-nonssl.json b/environment/sg/config/bootstrap-nonssl.json
index 188030b8c..96529a244 100644
--- a/environment/sg/config/bootstrap-nonssl.json
+++ b/environment/sg/config/bootstrap-nonssl.json
@@ -15,6 +15,14 @@
"enabled": true,
"log_level": "info",
"log_keys": ["*"]
+ },
+ "log_file_path": "/opt/sg/log",
+ "debug": {
+ "enabled": true,
+ "rotation": {
+ "max_size": 512,
+ "rotated_logs_size_limit": 1024
+ }
}
}
}
\ No newline at end of file
diff --git a/environment/sg/config/bootstrap.json b/environment/sg/config/bootstrap.json
index 7829cae29..51bb83d0b 100644
--- a/environment/sg/config/bootstrap.json
+++ b/environment/sg/config/bootstrap.json
@@ -19,6 +19,14 @@
"enabled": true,
"log_level": "info",
"log_keys": ["*"]
+ },
+ "log_file_path": "/opt/sg/log",
+ "debug": {
+ "enabled": true,
+ "rotation": {
+ "max_size": 512,
+ "rotated_logs_size_limit": 1024
+ }
}
}
}
\ No newline at end of file
diff --git a/servers/dotnet/testserver.logic/Handlers/NewSessionHandler.cs b/servers/dotnet/testserver.logic/Handlers/NewSessionHandler.cs
index 3629675e2..312af5acc 100644
--- a/servers/dotnet/testserver.logic/Handlers/NewSessionHandler.cs
+++ b/servers/dotnet/testserver.logic/Handlers/NewSessionHandler.cs
@@ -91,6 +91,7 @@ public static Task NewSessionHandler(int version, JsonDocument body, HttpListene
}
Log.Logger = new LoggerConfiguration()
+ .MinimumLevel.Verbose()
.WriteTo.Logger(Original)
.WriteTo.LogSlurp(newSessionBody.logging.url, newSessionBody.id, newSessionBody.logging.tag)
.CreateLogger();
diff --git a/servers/dotnet/testserver/MauiProgram.cs b/servers/dotnet/testserver/MauiProgram.cs
index 7b3d9b847..7402d21aa 100644
--- a/servers/dotnet/testserver/MauiProgram.cs
+++ b/servers/dotnet/testserver/MauiProgram.cs
@@ -29,7 +29,7 @@ public static MauiApp CreateMauiApp()
LogFilePath = $"{Path.GetTempFileName()}.txt";
var logConfig = new LoggerConfiguration()
- .MinimumLevel.Debug()
+ .MinimumLevel.Verbose()
.WriteTo.File(LogFilePath)
.WriteTo.Console(restrictedToMinimumLevel: LogEventLevel.Warning);
diff --git a/servers/dotnet/testserver/testserver.csproj b/servers/dotnet/testserver/testserver.csproj
index ac7b8cfeb..5d60d85c6 100644
--- a/servers/dotnet/testserver/testserver.csproj
+++ b/servers/dotnet/testserver/testserver.csproj
@@ -42,6 +42,8 @@
true
android-arm64;android-arm
maccatalyst-x64
+
+ true
diff --git a/tests/test_basic_replication.py b/tests/test_basic_replication.py
index 2e192022c..8b392b2f5 100644
--- a/tests/test_basic_replication.py
+++ b/tests/test_basic_replication.py
@@ -12,6 +12,7 @@
from cbltest.api.replicator_types import ReplicatorBasicAuthenticator, ReplicatorDocumentFlags
from cbltest.api.syncgateway import DocumentUpdateEntry
from cbltest.api.test_functions import compare_local_and_remote
+from cbltest.utils import assert_not_null
class TestBasicReplication(CBLTestClass):
@@ -325,7 +326,8 @@ async def test_continuous_pull(self, cblpytest: CBLPyTest, dataset_path: Path):
hotels_all_docs = await cblpytest.sync_gateways[0].get_all_documents("travel", "travel", "hotels")
for doc in hotels_all_docs.rows:
if doc.id == "hotel_400" or doc.id == "hotel_500":
- await cblpytest.sync_gateways[0].delete_document(doc.id, doc.revision, "travel", "travel", "hotels")
+ revid = assert_not_null(doc.revid, f"Missing revid on {doc.id}")
+ await cblpytest.sync_gateways[0].delete_document(doc.id, revid, "travel", "travel", "hotels")
self.mark_test_step("Wait until receiving all document replication events")
await replicator.wait_for_all_doc_events({
@@ -459,7 +461,8 @@ async def test_continuous_push_and_pull(self, cblpytest: CBLPyTest, dataset_path
hotels_all_docs = await cblpytest.sync_gateways[0].get_all_documents("travel", "travel", "hotels")
for doc in hotels_all_docs.rows:
if doc.id == "hotel_400" or doc.id == "hotel_500":
- await cblpytest.sync_gateways[0].delete_document(doc.id, doc.revision, "travel", "travel", "hotels")
+ revid = assert_not_null(doc.revid, f"Missing revid on {doc.id}")
+ await cblpytest.sync_gateways[0].delete_document(doc.id, revid, "travel", "travel", "hotels")
self.mark_test_step("Wait until receiving all document replication events")
await replicator.wait_for_all_doc_events({
diff --git a/tests/test_replication_behavior.py b/tests/test_replication_behavior.py
index d0eecc5a5..de1d4865c 100644
--- a/tests/test_replication_behavior.py
+++ b/tests/test_replication_behavior.py
@@ -1,5 +1,6 @@
from pathlib import Path
from cbltest import CBLPyTest
+from cbltest.utils import assert_not_null
from cbltest.api.cloud import CouchbaseCloud
from cbltest.api.replicator import Replicator
from cbltest.api.replicator_types import ReplicatorCollectionEntry, ReplicatorType, \
@@ -19,7 +20,8 @@ async def test_pull_empty_database_active_only(self, cblpytest: CBLPyTest, datas
for row in all_docs.rows:
name_number = int(row.id[-3:])
if name_number <= 150:
- await cblpytest.sync_gateways[0].delete_document(row.id, row.revision, "names")
+ revid = assert_not_null(row.revid, f"Missing revid on {row.id}")
+ await cblpytest.sync_gateways[0].delete_document(row.id, revid, "names")
self.mark_test_step("Reset local database, and load `empty` dataset")
dbs = await cblpytest.test_servers[0].create_and_reset_db(["db1"])
diff --git a/tests/test_replication_blob.py b/tests/test_replication_blob.py
index a46745d70..a53319dee 100644
--- a/tests/test_replication_blob.py
+++ b/tests/test_replication_blob.py
@@ -2,18 +2,20 @@
from typing import List
import pytest
from cbltest import CBLPyTest
+from cbltest.utils import assert_not_null
from cbltest.api.cloud import CouchbaseCloud
from cbltest.api.database import SnapshotUpdater
from cbltest.api.database_types import MaintenanceType, DocumentEntry
from cbltest.api.replicator import Replicator, ReplicatorType, ReplicatorCollectionEntry, ReplicatorActivityLevel
from cbltest.api.replicator_types import ReplicatorBasicAuthenticator
-from cbltest.api.syncgateway import DocumentUpdateEntry
+from cbltest.api.syncgateway import DocumentUpdateEntry, RemoteDocument
from cbltest.api.test_functions import compare_local_and_remote
from cbltest.api.cbltestclass import CBLTestClass
class TestReplicationBlob(CBLTestClass):
@pytest.mark.cbse(14861)
@pytest.mark.asyncio(loop_scope="session")
+ @pytest.mark.skip(reason="CBG-4389")
async def test_pull_non_blob_changes_with_delta_sync_and_compact(self, cblpytest: CBLPyTest, dataset_path: Path):
self.mark_test_step("Reset SG and load `travel` dataset with delta sync enabled.")
cloud = CouchbaseCloud(cblpytest.sync_gateways[0], cblpytest.couchbase_servers[0])
@@ -48,8 +50,10 @@ async def test_pull_non_blob_changes_with_delta_sync_and_compact(self, cblpytest
["travel.hotels"])
self.mark_test_step("Update hotel_1 on SG without changing the image key.")
+ hotel_1 = assert_not_null(await cblpytest.sync_gateways[0].get_document("travel", "hotel_1", "travel", "hotels"),
+ "hotel_1 vanished from SGW")
hotels_updates: List[DocumentUpdateEntry] = []
- hotels_updates.append(DocumentUpdateEntry("hotel_1", "1-2888d379591e42370912510ae8e8a976e1bf6436", body={
+ hotels_updates.append(DocumentUpdateEntry("hotel_1", hotel_1.revision, body={
"_attachments": {
"blob_/image": {
"content_type": "image/png",
@@ -84,8 +88,10 @@ async def test_pull_non_blob_changes_with_delta_sync_and_compact(self, cblpytest
["travel.hotels"])
self.mark_test_step("Update hotel_1 on SG again without changing the image key.")
+ hotel_1 = assert_not_null(await cblpytest.sync_gateways[0].get_document("travel", "hotel_1", "travel", "hotels"),
+ "hotel_1 vanished from SGW")
hotels_updates = []
- hotels_updates.append(DocumentUpdateEntry("hotel_1", "2-9a718e02f5e5aa1aa90bdbb25072d258", body={
+ hotels_updates.append(DocumentUpdateEntry("hotel_1", hotel_1.revision, body={
"_attachments": {
"blob_/image": {
"content_type": "image/png",
diff --git a/tests/test_replication_filter.py b/tests/test_replication_filter.py
index 44a4da070..482f178ae 100644
--- a/tests/test_replication_filter.py
+++ b/tests/test_replication_filter.py
@@ -1,6 +1,7 @@
from pathlib import Path
from typing import List, Set
from cbltest import CBLPyTest
+from cbltest.utils import assert_not_null
from cbltest.api.replicator import Replicator
from cbltest.api.replicator_types import (ReplicatorCollectionEntry, ReplicatorBasicAuthenticator, ReplicatorType,
ReplicatorActivityLevel, ReplicatorDocumentEntry, ReplicatorFilter)
@@ -130,13 +131,14 @@ async def test_pull_document_ids_filter(self, cblpytest: CBLPyTest, dataset_path
remote_landmark_10 = await cblpytest.sync_gateways[0].get_document("travel", "landmark_10", "travel", "landmarks")
assert remote_landmark_10 is not None, "Missing landmark_10 from sync gateway"
+ landmark_10_revid = assert_not_null(remote_landmark_10.revid, "Missing landmark_10 revid")
updates = [
DocumentUpdateEntry("airport_1000", None, {"answer": 42}),
DocumentUpdateEntry("airport_10", remote_airport_10.revid, {"answer": 42})
]
await cblpytest.sync_gateways[0].update_documents("travel", updates, "travel", "airports")
- await cblpytest.sync_gateways[0].delete_document("landmark_10", remote_landmark_10.revid, "travel", "travel", "landmarks")
+ await cblpytest.sync_gateways[0].delete_document("landmark_10", landmark_10_revid, "travel", "travel", "landmarks")
self.mark_test_step("Start the replicator with the same config as the step 3.")
replicator.clear_document_updates()
@@ -207,9 +209,11 @@ async def test_pull_channels_filter(self, cblpytest: CBLPyTest, dataset_path: Pa
remote_landmark_1 = await cblpytest.sync_gateways[0].get_document("travel", "landmark_1", "travel", "landmarks")
assert remote_landmark_1 is not None, "Missing landmark_1 from sync gateway"
+ landmark_1_revid = assert_not_null(remote_landmark_1.revid, "Missing landmark_1 revid")
remote_landmark_601 = await cblpytest.sync_gateways[0].get_document("travel", "landmark_601", "travel", "landmarks")
assert remote_landmark_601 is not None, "Missing landmark_601 from sync gateway"
+ landmark_601_revid = assert_not_null(remote_landmark_601.revid, "Missing landmark_601 revid")
updates = [
DocumentUpdateEntry("airport_1000", None, {"answer": 42, "channels": ["United Kingdom"]}),
@@ -221,8 +225,8 @@ async def test_pull_channels_filter(self, cblpytest: CBLPyTest, dataset_path: Pa
]
await cblpytest.sync_gateways[0].update_documents("travel", updates, "travel", "airports")
- await cblpytest.sync_gateways[0].delete_document("landmark_1", remote_landmark_1.revid, "travel", "travel", "landmarks")
- await cblpytest.sync_gateways[0].delete_document("landmark_601", remote_landmark_601.revid, "travel", "travel", "landmarks")
+ await cblpytest.sync_gateways[0].delete_document("landmark_1", landmark_1_revid, "travel", "travel", "landmarks")
+ await cblpytest.sync_gateways[0].delete_document("landmark_601", landmark_601_revid, "travel", "travel", "landmarks")
self.mark_test_step("Start the replicator with the same config as the step 3.")
replicator.clear_document_updates()
@@ -426,13 +430,15 @@ def repl_filter(x):
remote_name_10 = await cblpytest.sync_gateways[0].get_document("names", "name_105")
assert remote_name_10 is not None, "Missing name_105 from sync gateway"
+ name_10_revid = assert_not_null(remote_name_10.revid, "Missing name_105 revid")
remote_name_20 = await cblpytest.sync_gateways[0].get_document("names", "name_193")
assert remote_name_20 is not None, "Missing name_193 from sync gateway"
+ name_20_revid = assert_not_null(remote_name_20.revid, "Missing name_193 revid")
await cblpytest.sync_gateways[0].update_documents("names", updates)
- await cblpytest.sync_gateways[0].delete_document("name_105", remote_name_10.revid, "names")
- await cblpytest.sync_gateways[0].delete_document("name_193", remote_name_20.revid, "names")
+ await cblpytest.sync_gateways[0].delete_document("name_105", name_10_revid, "names")
+ await cblpytest.sync_gateways[0].delete_document("name_193", name_20_revid, "names")
self.mark_test_step("Start a replicator with the same config as in step 3.")
await replicator.start()