Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 2 additions & 8 deletions google/cloud/firestore_v1/async_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,6 @@ async def execute(
transaction: "AsyncTransaction" | None = None,
read_time: datetime.datetime | None = None,
explain_options: PipelineExplainOptions | None = None,
index_mode: str | None = None,
additional_options: dict[str, Value | Constant] = {},
) -> PipelineSnapshot[PipelineResult]:
"""
Expand All @@ -87,10 +86,8 @@ async def execute(
explain_options (Optional[:class:`~google.cloud.firestore_v1.query_profile.PipelineExplainOptions`]):
Options to enable query profiling for this query. When set,
explain_metrics will be available on the returned list.
index_mode (Optional[str]): Configures the pipeline to require a certain type of indexes to be present.
Firestore will reject the request if there is not appropiate indexes to serve the query.
additional_options (Optional[dict[str, Value | Constant]]): Additional options to pass to the query.
These options will take precedence over method argument if there is a conflict (e.g. explain_options, index_mode)
These options will take precedence over method arguments if there is a conflict (e.g. explain_options)
"""
kwargs = {k: v for k, v in locals().items() if k != "self"}
stream = AsyncPipelineStream(PipelineResult, self, **kwargs)
Expand All @@ -103,7 +100,6 @@ def stream(
read_time: datetime.datetime | None = None,
transaction: "AsyncTransaction" | None = None,
explain_options: PipelineExplainOptions | None = None,
index_mode: str | None = None,
additional_options: dict[str, Value | Constant] = {},
) -> AsyncPipelineStream[PipelineResult]:
"""
Expand All @@ -122,10 +118,8 @@ def stream(
explain_options (Optional[:class:`~google.cloud.firestore_v1.query_profile.PipelineExplainOptions`]):
Options to enable query profiling for this query. When set,
explain_metrics will be available on the returned generator.
index_mode (Optional[str]): Configures the pipeline to require a certain type of indexes to be present.
Firestore will reject the request if there is not appropiate indexes to serve the query.
additional_options (Optional[dict[str, Value | Constant]]): Additional options to pass to the query.
These options will take precedence over method argument if there is a conflict (e.g. explain_options, index_mode)
These options will take precedence over method arguments if there is a conflict (e.g. explain_options)
"""
kwargs = {k: v for k, v in locals().items() if k != "self"}
return AsyncPipelineStream(PipelineResult, self, **kwargs)
10 changes: 2 additions & 8 deletions google/cloud/firestore_v1/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,6 @@ def execute(
transaction: "Transaction" | None = None,
read_time: datetime.datetime | None = None,
explain_options: PipelineExplainOptions | None = None,
index_mode: str | None = None,
additional_options: dict[str, Value | Constant] = {},
) -> PipelineSnapshot[PipelineResult]:
"""
Expand All @@ -84,10 +83,8 @@ def execute(
explain_options (Optional[:class:`~google.cloud.firestore_v1.query_profile.PipelineExplainOptions`]):
Options to enable query profiling for this query. When set,
explain_metrics will be available on the returned list.
index_mode (Optional[str]): Configures the pipeline to require a certain type of indexes to be present.
Firestore will reject the request if there is not appropiate indexes to serve the query.
additional_options (Optional[dict[str, Value | Constant]]): Additional options to pass to the query.
These options will take precedence over method argument if there is a conflict (e.g. explain_options, index_mode)
These options will take precedence over method arguments if there is a conflict (e.g. explain_options)
"""
kwargs = {k: v for k, v in locals().items() if k != "self"}
stream = PipelineStream(PipelineResult, self, **kwargs)
Expand All @@ -100,7 +97,6 @@ def stream(
transaction: "Transaction" | None = None,
read_time: datetime.datetime | None = None,
explain_options: PipelineExplainOptions | None = None,
index_mode: str | None = None,
additional_options: dict[str, Value | Constant] = {},
) -> PipelineStream[PipelineResult]:
"""
Expand All @@ -119,10 +115,8 @@ def stream(
explain_options (Optional[:class:`~google.cloud.firestore_v1.query_profile.PipelineExplainOptions`]):
Options to enable query profiling for this query. When set,
explain_metrics will be available on the returned generator.
index_mode (Optional[str]): Configures the pipeline to require a certain type of indexes to be present.
Firestore will reject the request if there is not appropiate indexes to serve the query.
additional_options (Optional[dict[str, Value | Constant]]): Additional options to pass to the query.
These options will take precedence over method argument if there is a conflict (e.g. explain_options, index_mode)
These options will take precedence over method arguments if there is a conflict (e.g. explain_options)
"""
kwargs = {k: v for k, v in locals().items() if k != "self"}
return PipelineStream(PipelineResult, self, **kwargs)
4 changes: 0 additions & 4 deletions google/cloud/firestore_v1/pipeline_result.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,6 @@ def __init__(
transaction: Transaction | AsyncTransaction | None,
read_time: datetime.datetime | None,
explain_options: PipelineExplainOptions | None,
index_mode: str | None,
additional_options: dict[str, Constant | Value],
):
# public
Expand All @@ -192,7 +191,6 @@ def __init__(
self._explain_stats: ExplainStats | None = None
self._explain_options: PipelineExplainOptions | None = explain_options
self._return_type = return_type
self._index_mode = index_mode
self._additonal_options = {
k: v if isinstance(v, Value) else v._to_pb()
for k, v in additional_options.items()
Expand Down Expand Up @@ -226,8 +224,6 @@ def _build_request(self) -> ExecutePipelineRequest:
options = {}
if self._explain_options:
options["explain_options"] = self._explain_options._to_value()
if self._index_mode:
options["index_mode"] = Value(string_value=self._index_mode)
if self._additonal_options:
options.update(self._additonal_options)
request = ExecutePipelineRequest(
Expand Down
4 changes: 1 addition & 3 deletions tests/system/test__helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,8 @@
EMULATOR_CREDS = EmulatorCreds()
FIRESTORE_EMULATOR = os.environ.get(_FIRESTORE_EMULATOR_HOST) is not None
FIRESTORE_OTHER_DB = os.environ.get("SYSTEM_TESTS_DATABASE", "system-tests-named-db")
FIRESTORE_ENTERPRISE_DB = os.environ.get("ENTERPRISE_DATABASE", "enterprise-db")
FIRESTORE_ENTERPRISE_DB = os.environ.get("ENTERPRISE_DATABASE", "enterprise-db-native")

# run all tests against default database, and a named database
TEST_DATABASES = [None, FIRESTORE_OTHER_DB]
TEST_DATABASES_W_ENTERPRISE = TEST_DATABASES + [FIRESTORE_ENTERPRISE_DB]
# TODO remove when kokoro fully supports enterprise mode/pipelines
IS_KOKORO_TEST = os.getenv("KOKORO_JOB_NAME") is not None
8 changes: 1 addition & 7 deletions tests/system/test_pipeline_acceptance.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,16 +33,10 @@

from google.cloud.firestore import Client, AsyncClient

from test__helpers import FIRESTORE_ENTERPRISE_DB, IS_KOKORO_TEST
from test__helpers import FIRESTORE_ENTERPRISE_DB

FIRESTORE_PROJECT = os.environ.get("GCLOUD_PROJECT")

# TODO: enable kokoro tests when internal test project is whitelisted
pytestmark = pytest.mark.skipif(
condition=IS_KOKORO_TEST,
reason="Pipeline tests are currently not supported by kokoro",
)

test_dir_name = os.path.dirname(__file__)

id_format = (
Expand Down
29 changes: 0 additions & 29 deletions tests/system/test_system.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,6 @@
ENTERPRISE_MODE_ERROR,
TEST_DATABASES,
TEST_DATABASES_W_ENTERPRISE,
IS_KOKORO_TEST,
FIRESTORE_ENTERPRISE_DB,
)

Expand All @@ -67,12 +66,6 @@ def _get_credentials_and_project():

@pytest.fixture(scope="session")
def database(request):
from test__helpers import FIRESTORE_ENTERPRISE_DB

# enterprise mode currently does not support RunQuery calls in prod on kokoro test project
# TODO: remove skip when kokoro test project supports full enterprise mode
if request.param == FIRESTORE_ENTERPRISE_DB and IS_KOKORO_TEST:
pytest.skip("enterprise mode does not support RunQuery on kokoro")
return request.param


Expand Down Expand Up @@ -101,11 +94,6 @@ def verify_pipeline(query):
"""
from google.cloud.firestore_v1.base_aggregation import BaseAggregationQuery

# return early on kokoro. Test project doesn't currently support pipelines
# TODO: enable pipeline verification when kokoro test project is whitelisted
if IS_KOKORO_TEST:
pytest.skip("skipping pipeline verification on kokoro")

def _clean_results(results):
if isinstance(results, dict):
return {k: _clean_results(v) for k, v in results.items()}
Expand Down Expand Up @@ -1771,22 +1759,6 @@ def test_pipeline_explain_options_using_additional_options(
assert "Execution:" in text_stats


@pytest.mark.skipif(
FIRESTORE_EMULATOR, reason="Query profile not supported in emulator."
)
@pytest.mark.parametrize("database", [FIRESTORE_ENTERPRISE_DB], indirect=True)
def test_pipeline_index_mode(database, query_docs):
"""test pipeline query with explicit index mode"""

collection, _, allowed_vals = query_docs
client = collection._client
query = collection.where(filter=FieldFilter("a", "==", 1))
pipeline = client.pipeline().create_from(query)
with pytest.raises(InvalidArgument) as e:
pipeline.execute(index_mode="fake_index")
assert "Invalid index_mode: fake_index" in str(e)


@pytest.mark.parametrize("database", TEST_DATABASES, indirect=True)
def test_query_stream_w_read_time(query_docs, cleanup, database):
collection, stored, allowed_vals = query_docs
Expand Down Expand Up @@ -1825,7 +1797,6 @@ def test_query_stream_w_read_time(query_docs, cleanup, database):
assert new_values[new_ref.id] == new_data


@pytest.mark.skipif(IS_KOKORO_TEST, reason="skipping pipeline verification on kokoro")
@pytest.mark.parametrize("database", [FIRESTORE_ENTERPRISE_DB], indirect=True)
def test_pipeline_w_read_time(query_docs, cleanup, database):
collection, stored, allowed_vals = query_docs
Expand Down
13 changes: 0 additions & 13 deletions tests/system/test_system_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,6 @@
ENTERPRISE_MODE_ERROR,
TEST_DATABASES,
TEST_DATABASES_W_ENTERPRISE,
IS_KOKORO_TEST,
FIRESTORE_ENTERPRISE_DB,
)

Expand Down Expand Up @@ -145,12 +144,6 @@ def _verify_explain_metrics_analyze_false(explain_metrics):

@pytest.fixture(scope="session")
def database(request):
from test__helpers import FIRESTORE_ENTERPRISE_DB

# enterprise mode currently does not support RunQuery calls in prod on kokoro test project
# TODO: remove skip when kokoro test project supports full enterprise mode
if request.param == FIRESTORE_ENTERPRISE_DB and IS_KOKORO_TEST:
pytest.skip("enterprise mode does not support RunQuery on kokoro")
return request.param


Expand Down Expand Up @@ -181,11 +174,6 @@ async def verify_pipeline(query):
"""
from google.cloud.firestore_v1.base_aggregation import BaseAggregationQuery

# return early on kokoro. Test project doesn't currently support pipelines
# TODO: enable pipeline verification when kokoro test project is whitelisted
if IS_KOKORO_TEST:
pytest.skip("skipping pipeline verification on kokoro")

def _clean_results(results):
if isinstance(results, dict):
return {k: _clean_results(v) for k, v in results.items()}
Expand Down Expand Up @@ -1694,7 +1682,6 @@ async def test_pipeline_explain_options_using_additional_options(
assert "Execution:" in text_stats


@pytest.mark.skipif(IS_KOKORO_TEST, reason="skipping pipeline verification on kokoro")
@pytest.mark.parametrize("database", [FIRESTORE_ENTERPRISE_DB], indirect=True)
async def test_pipeline_w_read_time(query_docs, cleanup, database):
collection, stored, allowed_vals = query_docs
Expand Down
12 changes: 0 additions & 12 deletions tests/unit/v1/test_pipeline_result.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,23 +213,20 @@ def test_ctor(self):
expected_transaction = object()
expected_read_time = 123
expected_explain_options = object()
expected_index_mode = "mode"
expected_addtl_options = {}
source = PipelineStream(
expected_type,
expected_pipeline,
expected_transaction,
expected_read_time,
expected_explain_options,
expected_index_mode,
expected_addtl_options,
)
instance = self._make_one(in_arr, source)
assert instance._return_type == expected_type
assert instance.pipeline == expected_pipeline
assert instance._client == expected_pipeline._client
assert instance._additonal_options == expected_addtl_options
assert instance._index_mode == expected_index_mode
assert instance._explain_options == expected_explain_options
assert instance._explain_stats is None
assert instance._started is True
Expand Down Expand Up @@ -281,7 +278,6 @@ def _mock_init_args(self):
"transaction": None,
"read_time": None,
"explain_options": None,
"index_mode": None,
"additional_options": {},
}

Expand Down Expand Up @@ -312,7 +308,6 @@ def test_explain_stats(self):
@pytest.mark.parametrize(
"init_kwargs,expected_options",
[
({"index_mode": "mode"}, {"index_mode": encode_value("mode")}),
(
{"explain_options": PipelineExplainOptions()},
{"explain_options": encode_value({"mode": "analyze"})},
Expand All @@ -336,13 +331,6 @@ def test_explain_stats(self):
},
{"explain_options": encode_value("override")},
),
(
{
"index_mode": "mode",
"additional_options": {"index_mode": Constant("new")},
},
{"index_mode": encode_value("new")},
),
],
)
def test_build_request_options(self, init_kwargs, expected_options):
Expand Down