diff --git a/.generator/cli.py b/.generator/cli.py
index ab805e9b312a..b09f5cb8f0cf 100644
--- a/.generator/cli.py
+++ b/.generator/cli.py
@@ -300,6 +300,13 @@ def _run_post_processor(output: str, library_id: str):
         python_mono_repo.owlbot_main(path_to_library)
     else:
         raise SYNTHTOOL_IMPORT_ERROR  # pragma: NO COVER
+
+    # If there is no noxfile, run `isort` and `black` on the output.
+    # This is required for proto-only libraries which are not GAPIC.
+    if not Path(f"{output}/{path_to_library}/noxfile.py").exists():
+        subprocess.run(["isort", output])
+        subprocess.run(["black", output])
+
     logger.info("Python post-processor ran successfully.")
 
 
@@ -541,7 +548,7 @@ def _read_bazel_build_py_rule(api_path: str, source: str) -> Dict:
         source (str): Path to the directory containing API protos.
 
     Returns:
-        Dict: A dictionary containing the parsed attributes of the `_py_gapic` rule.
+        Dict: A dictionary containing the parsed attributes of the `_py_gapic` rule, if found.
     """
     build_file_path = f"{source}/{api_path}/BUILD.bazel"
     content = _read_text_file(build_file_path)
@@ -549,8 +556,11 @@
     result = parse_googleapis_content.parse_content(content)
     py_gapic_entries = [key for key in result.keys() if key.endswith("_py_gapic")]
 
-    # Assuming only one _py_gapic rule per BUILD file for a given language
-    return result[py_gapic_entries[0]]
+    # Assuming at most one _py_gapic rule per BUILD file for a given language
+    if len(py_gapic_entries) > 0:
+        return result[py_gapic_entries[0]]
+    else:
+        return {}
 
 
 def _get_api_generator_options(
@@ -598,6 +608,26 @@
     return generator_options
 
 
+def _construct_protoc_command(api_path: str, tmp_dir: str) -> str:
+    """
+    Constructs the full protoc command string.
+
+    Args:
+        api_path (str): The relative path to the API directory.
+        tmp_dir (str): The temporary directory for protoc output.
+
+    Returns:
+        str: The complete protoc command string suitable for shell execution.
+    """
+    command_parts = [
+        f"protoc {api_path}/*.proto",
+        f"--python_out={tmp_dir}",
+        f"--pyi_out={tmp_dir}",
+    ]
+
+    return " ".join(command_parts)
+
+
 def _determine_generator_command(
     api_path: str, tmp_dir: str, generator_options: List[str]
 ) -> str:
@@ -626,7 +656,7 @@
     return " ".join(command_parts)
 
 
-def _run_generator_command(generator_command: str, source: str):
+def _run_protoc_command(generator_command: str, source: str):
     """
     Executes the protoc generation command using subprocess.
 
@@ -645,6 +675,75 @@
     )
 
 
+def _get_staging_child_directory(api_path: str, is_proto_only_library: bool) -> str:
+    """
+    Determines the correct sub-path within 'owl-bot-staging' for the generated code.
+
+    For proto-only libraries, the structure is usually just the proto directory,
+    e.g., 'thing-py/google/thing'.
+    For GAPIC libraries, it's typically the version segment, e.g., 'v1'.
+
+    Args:
+        api_path (str): The relative path to the API directory (e.g., 'google/cloud/language/v1').
+        is_proto_only_library (bool): True if this is a proto-only library.
+
+    Returns:
+        str: The sub-directory name to use for staging.
+    """
+
+    version_candidate = api_path.split("/")[-1]
+    if version_candidate.startswith("v") and not is_proto_only_library:
+        return version_candidate
+    else:
+        # Fallback for proto-only libraries and non-'v' version segments
+        return f"{os.path.basename(api_path)}-py/{api_path}"
+
+
+def _stage_proto_only_library(
+    api_path: str, source_dir: str, tmp_dir: str, staging_dir: str
+) -> None:
+    """
+    Handles staging for proto-only libraries (e.g., common protos).
+
+    This involves copying the generated Python files and the original proto files.
+
+    Args:
+        api_path (str): The relative path to the API directory.
+        source_dir (str): Path to the directory containing API protos.
+        tmp_dir (str): The temporary directory where protoc output was placed.
+        staging_dir (str): The final destination for the staged code.
+    """
+    # 1. Copy the generated Python files (e.g., *_pb2.py) from the protoc output.
+    # The generated Python files are placed under a directory corresponding to `api_path`
+    # inside the temporary directory, since the protoc command ran with `api_path`
+    # specified.
+    shutil.copytree(f"{tmp_dir}/{api_path}", staging_dir, dirs_exist_ok=True)
+
+    # 2. Copy the original proto files to the staging directory.
+    # This is typically done for proto-only libraries so that the protos are included
+    # in the distributed package.
+    proto_glob_path = f"{source_dir}/{api_path}/*.proto"
+    for proto_file in glob.glob(proto_glob_path):
+        # The glob is expected to find the file inside the source_dir.
+        # We copy only the filename to the target staging directory.
+        shutil.copyfile(proto_file, f"{staging_dir}/{os.path.basename(proto_file)}")
+
+
+def _stage_gapic_library(tmp_dir: str, staging_dir: str) -> None:
+    """
+    Handles staging for GAPIC client libraries.
+
+    This involves copying all contents from the temporary output directory.
+
+    Args:
+        tmp_dir (str): The temporary directory where protoc/GAPIC generator output was placed.
+        staging_dir (str): The final destination for the staged code.
+    """
+    # For GAPIC, the generator output is flat in `tmp_dir` and includes all
+    # necessary files like setup.py, client library, etc.
+    shutil.copytree(tmp_dir, staging_dir)
+
+
 def _generate_api(
     api_path: str, library_id: str, source: str, output: str, gapic_version: str
 ):
@@ -660,18 +759,32 @@ def _generate_api(
         in a format which follows PEP-440.
     """
     py_gapic_config = _read_bazel_build_py_rule(api_path, source)
-    generator_options = _get_api_generator_options(
-        api_path, py_gapic_config, gapic_version=gapic_version
-    )
+    is_proto_only_library = len(py_gapic_config) == 0
 
     with tempfile.TemporaryDirectory() as tmp_dir:
-        generator_command = _determine_generator_command(
-            api_path, tmp_dir, generator_options
+        # 1. Determine the command for code generation
+        if is_proto_only_library:
+            command = _construct_protoc_command(api_path, tmp_dir)
+        else:
+            generator_options = _get_api_generator_options(
+                api_path, py_gapic_config, gapic_version=gapic_version
+            )
+            command = _determine_generator_command(api_path, tmp_dir, generator_options)
+
+        # 2. Execute the code generation command
+        _run_protoc_command(command, source)
+
+        # 3. Determine staging location
+        staging_child_directory = _get_staging_child_directory(api_path, is_proto_only_library)
+        staging_dir = os.path.join(
+            output, "owl-bot-staging", library_id, staging_child_directory
         )
-        _run_generator_command(generator_command, source)
-        api_version = api_path.split("/")[-1]
-        staging_dir = os.path.join(output, "owl-bot-staging", library_id, api_version)
-        shutil.copytree(tmp_dir, staging_dir)
+
+        # 4. Stage the generated code
+        if is_proto_only_library:
+            _stage_proto_only_library(api_path, source, tmp_dir, staging_dir)
+        else:
+            _stage_gapic_library(tmp_dir, staging_dir)
 
 
 def _run_nox_sessions(library_id: str, repo: str):
diff --git a/.generator/requirements-test.in b/.generator/requirements-test.in
index 80bf22b7b8d6..59ae478b8438 100644
--- a/.generator/requirements-test.in
+++ b/.generator/requirements-test.in
@@ -18,3 +18,5 @@ pytest-mock
 gcp-synthtool @ git+https://github.com/googleapis/synthtool@5aa438a342707842d11fbbb302c6277fbf9e4655
 starlark-pyo3>=2025.1
 build
+black==23.7.0
+isort==5.11.0
diff --git a/.generator/requirements.in b/.generator/requirements.in
index b1bfe27b6033..9153bb7f4829 100644
--- a/.generator/requirements.in
+++ b/.generator/requirements.in
@@ -3,3 +3,5 @@ gapic-generator>=1.27.0
 nox
 starlark-pyo3>=2025.1
 build
+black==23.7.0
+isort==5.11.0
diff --git a/.generator/test_cli.py b/.generator/test_cli.py
index d02a4075c9b3..701a2dcf556d 100644
--- a/.generator/test_cli.py
+++ b/.generator/test_cli.py
@@ -49,6 +49,7 @@
     _get_libraries_to_prepare_for_release,
     _get_new_library_config,
     _get_previous_version,
+    _get_staging_child_directory,
     _add_new_library_version,
     _prepare_new_library_config,
     _process_changelog,
@@ -57,9 +58,11 @@
     _read_json_file,
     _read_text_file,
     _run_individual_session,
-    _run_generator_command,
     _run_nox_sessions,
     _run_post_processor,
+    _run_protoc_command,
+    _stage_gapic_library,
+    _stage_proto_only_library,
     _update_changelog_for_library,
     _update_global_changelog,
     _update_version_for_library,
@@ -105,7 +108,7 @@
     },
 ]
 
-_MOCK_BAZEL_CONTENT = """load(
+_MOCK_BAZEL_CONTENT_PY_GAPIC = """load(
     "@com_google_googleapis_imports//:imports.bzl",
     "py_gapic_assembly_pkg",
     "py_gapic_library",
@@ -126,6 +129,15 @@
     ],
 )"""
 
+_MOCK_BAZEL_CONTENT_PY_PROTO = """load(
+    "@com_google_googleapis_imports//:imports.bzl",
+    "py_proto_library",
+)
+
+py_proto_library(
+    name = "language_py_proto",
+)"""
+
 
 @pytest.fixture
 def mock_generate_request_file(tmp_path, monkeypatch):
@@ -205,7 +217,7 @@ def mock_build_bazel_file(tmp_path, monkeypatch):
     os.makedirs(bazel_build_dir, exist_ok=True)
 
     build_bazel_file = bazel_build_dir / os.path.basename(bazel_build_path)
-    build_bazel_file.write_text(_MOCK_BAZEL_CONTENT)
+    build_bazel_file.write_text(_MOCK_BAZEL_CONTENT_PY_GAPIC)
 
     return build_bazel_file
 
@@ -438,7 +450,7 @@ def test_read_bazel_build_py_rule_success(mocker, mock_build_bazel_file):
     # Use the empty string as the source path, since the fixture has set the CWD to the temporary root.
     source_dir = "source"
 
-    mocker.patch("cli._read_text_file", return_value=_MOCK_BAZEL_CONTENT)
+    mocker.patch("cli._read_text_file", return_value=_MOCK_BAZEL_CONTENT_PY_GAPIC)
     # The fixture already creates the file, so we just need to call the function
     py_gapic_config = _read_bazel_build_py_rule(api_path, source_dir)
 
@@ -451,6 +463,20 @@
     assert py_gapic_config["opt_args"] == ["python-gapic-namespace=google.cloud"]
 
 
+def test_read_bazel_build_py_rule_not_found(mocker, mock_build_bazel_file):
+    """Tests parsing of a valid BUILD.bazel file with no `_py_gapic` rule (proto-only library)."""
+    api_path = "google/cloud/language/v1"
+    # Use the empty string as the source path, since the fixture has set the CWD to the temporary root.
+    source_dir = "source"
+
+    mocker.patch("cli._read_text_file", return_value=_MOCK_BAZEL_CONTENT_PY_PROTO)
+    # The fixture already creates the file, so we just need to call the function
+    py_gapic_config = _read_bazel_build_py_rule(api_path, source_dir)
+
+    assert "language_py_gapic" not in py_gapic_config
+    assert py_gapic_config == {}
+
+
 def test_get_api_generator_options_all_options():
     """Tests option extraction when all relevant fields are present."""
     api_path = "google/cloud/language/v1"
@@ -518,7 +544,7 @@ def test_determine_generator_command_no_options():
 
     assert command == expected_command_no_options
 
 
-def test_run_generator_command_success(mocker):
+def test_run_protoc_command_success(mocker):
     """Tests successful execution of the protoc command."""
     mock_run = mocker.patch(
         "cli.subprocess.run", return_value=MagicMock(stdout="ok", stderr="", check=True)
@@ -526,14 +552,14 @@
     )
     command = "protoc api/*.proto --python_gapic_out=/tmp/out"
     source = "/src"
 
-    _run_generator_command(command, source)
+    _run_protoc_command(command, source)
 
     mock_run.assert_called_once_with(
         [command], cwd=source, shell=True, check=True, capture_output=True, text=True
     )
 
 
-def test_run_generator_command_failure(mocker):
+def test_run_protoc_command_failure(mocker):
     """Tests failure when protoc command returns a non-zero exit code."""
     mock_run = mocker.patch(
         "cli.subprocess.run",
@@ -543,10 +569,37 @@
     source = "/src"
 
     with pytest.raises(subprocess.CalledProcessError):
-        _run_generator_command(command, source)
+        _run_protoc_command(command, source)
+
+
+def test_generate_api_success_py_gapic(mocker, caplog):
+    caplog.set_level(logging.INFO)
+    API_PATH = "google/cloud/language/v1"
+    LIBRARY_ID = "google-cloud-language"
+    SOURCE = "source"
+    OUTPUT = "output"
+    gapic_version = "1.2.99"
 
-def test_generate_api_success(mocker, caplog):
+    mock_read_bazel_build_py_rule = mocker.patch(
+        "cli._read_bazel_build_py_rule",
+        return_value={
+            "py_gapic_library": {
+                "name": "language_py_gapic",
+            }
+        },
+    )
+    mock_run_protoc_command = mocker.patch("cli._run_protoc_command")
+    mock_shutil_copytree = mocker.patch("shutil.copytree")
+
+    _generate_api(API_PATH, LIBRARY_ID, SOURCE, OUTPUT, gapic_version)
+
+    mock_read_bazel_build_py_rule.assert_called_once()
+    mock_run_protoc_command.assert_called_once()
+    mock_shutil_copytree.assert_called_once()
+
+
+def test_generate_api_success_py_proto(mocker, caplog):
     caplog.set_level(logging.INFO)
 
     API_PATH = "google/cloud/language/v1"
@@ -555,18 +608,16 @@
     OUTPUT = "output"
     gapic_version = "1.2.99"
 
-    mock_read_bazel_build_py_rule = mocker.patch("cli._read_bazel_build_py_rule")
-    mock_get_api_generator_options = mocker.patch("cli._get_api_generator_options")
-    mock_determine_generator_command = mocker.patch("cli._determine_generator_command")
-    mock_run_generator_command = mocker.patch("cli._run_generator_command")
+    mock_read_bazel_build_py_rule = mocker.patch(
+        "cli._read_bazel_build_py_rule", return_value={}
+    )
+    mock_run_protoc_command = mocker.patch("cli._run_protoc_command")
     mock_shutil_copytree = mocker.patch("shutil.copytree")
 
     _generate_api(API_PATH, LIBRARY_ID, SOURCE, OUTPUT, gapic_version)
 
     mock_read_bazel_build_py_rule.assert_called_once()
-    mock_get_api_generator_options.assert_called_once()
-    mock_determine_generator_command.assert_called_once()
-    mock_run_generator_command.assert_called_once()
+    mock_run_protoc_command.assert_called_once()
     mock_shutil_copytree.assert_called_once()
 
 
@@ -1304,3 +1355,89 @@ def test_verify_library_namespace_error_no_gapic_file(mocker, mock_path_class):
 
     # Verify the initial path logic still ran
     mock_path_class.assert_called_once_with("repo/packages/my-lib")
+
+
+def test_get_staging_child_directory_gapic():
+    """
+    Tests the behavior for GAPIC clients with standard 'v' prefix versioning.
+    Should return only the version segment (e.g., 'v1').
+    """
+    # Standard v1
+    api_path = "google/cloud/language/v1"
+    expected = "v1"
+    assert _get_staging_child_directory(api_path, False) == expected
+
+
+def test_get_staging_child_directory_proto_only():
+    """
+    Tests the behavior for proto-only libraries.
+    """
+    # A non-versioned path segment
+    api_path = "google/protobuf"
+    expected = "protobuf-py/google/protobuf"
+    assert _get_staging_child_directory(api_path, True) == expected
+
+    # A versioned path segment, still staged under the full proto path
+    api_path = "google/protobuf/v1"
+    expected = "v1-py/google/protobuf/v1"
+    assert _get_staging_child_directory(api_path, True) == expected
+
+
+def test_stage_proto_only_library(mocker):
+    """
+    Tests the file operations for proto-only library staging.
+    It should call copytree once for generated files and copyfile for each proto file.
+    """
+    mock_shutil_copyfile = mocker.patch("shutil.copyfile")
+    mock_shutil_copytree = mocker.patch("shutil.copytree")
+    mock_glob_glob = mocker.patch("glob.glob")
+
+    # Mock glob.glob to return a list of fake proto files
+    mock_proto_files = [
+        "/home/source/google/cloud/common/types/common.proto",
+        "/home/source/google/cloud/common/types/status.proto",
+    ]
+    mock_glob_glob.return_value = mock_proto_files
+
+    # Define test parameters
+    api_path = "google/cloud/common/types"
+    source_dir = "/home/source"
+    tmp_dir = "/tmp/protoc_output"
+    staging_dir = "/output/staging/types"
+
+    _stage_proto_only_library(api_path, source_dir, tmp_dir, staging_dir)
+
+    # Assertion 1: Check copytree was called exactly once to move generated Python files
+    mock_shutil_copytree.assert_called_once_with(
+        f"{tmp_dir}/{api_path}", staging_dir, dirs_exist_ok=True
+    )
+
+    # Assertion 2: Check glob.glob was called correctly
+    expected_glob_path = f"{source_dir}/{api_path}/*.proto"
+    mock_glob_glob.assert_called_once_with(expected_glob_path)
+
+    # Assertion 3: Check copyfile was called once for each proto file found by glob
+    assert mock_shutil_copyfile.call_count == len(mock_proto_files)
+
+    # Check the exact arguments for copyfile calls
+    mock_shutil_copyfile.assert_any_call(
+        mock_proto_files[0], f"{staging_dir}/{os.path.basename(mock_proto_files[0])}"
+    )
+    mock_shutil_copyfile.assert_any_call(
+        mock_proto_files[1], f"{staging_dir}/{os.path.basename(mock_proto_files[1])}"
+    )
+
+
+def test_stage_gapic_library(mocker):
+    """
+    Tests that _stage_gapic_library correctly calls shutil.copytree once,
+    copying everything from the temporary directory to the staging directory.
+    """
+    tmp_dir = "/tmp/gapic_output"
+    staging_dir = "/output/staging/v1"
+
+    mock_shutil_copytree = mocker.patch("shutil.copytree")
+    _stage_gapic_library(tmp_dir, staging_dir)
+
+    # Assertion: Check copytree was called exactly once with the correct arguments
+    mock_shutil_copytree.assert_called_once_with(tmp_dir, staging_dir)
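
For reference, a minimal self-contained sketch of the staging-path rule exercised by the new tests. The logic and example paths mirror _get_staging_child_directory and its tests in the diff above; the standalone function name below is illustrative only, not part of the change.

import os


def staging_child_directory(api_path: str, is_proto_only_library: bool) -> str:
    # Mirrors _get_staging_child_directory from the diff above.
    version_candidate = api_path.split("/")[-1]
    if version_candidate.startswith("v") and not is_proto_only_library:
        return version_candidate
    # Proto-only libraries (and non-'v' segments) keep the full proto path.
    return f"{os.path.basename(api_path)}-py/{api_path}"


# GAPIC library: staged under just the version segment.
assert staging_child_directory("google/cloud/language/v1", False) == "v1"

# Proto-only library: staged under '<basename>-py/<api_path>'.
assert staging_child_directory("google/protobuf", True) == "protobuf-py/google/protobuf"
assert staging_child_directory("google/protobuf/v1", True) == "v1-py/google/protobuf/v1"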