diff --git a/.generator/Dockerfile b/.generator/Dockerfile
index b634e01217f9..afceb7fd4693 100644
--- a/.generator/Dockerfile
+++ b/.generator/Dockerfile
@@ -69,8 +69,10 @@ RUN wget https://github.com/jgm/pandoc/releases/download/${PANDOC_VERSION}/pando
 RUN tar -xvf pandoc-${PANDOC_VERSION}-linux-amd64.tar.gz -C pandoc-binary --strip-components=1

 # Pin synthtool for a more hermetic build
-RUN git clone https://github.com/googleapis/synthtool.git synthtool
-RUN cd synthtool && git checkout 35313ccd8cdd2d12d2447ccdc497a7919aae1e3e
+# Clone and checkout in a single RUN command so that a cached clone layer is not reused when the pinned commit changes
+RUN git clone https://github.com/googleapis/synthtool.git synthtool && \
+    cd synthtool && \
+    git checkout 6702a344265de050bceaff45d62358bb0023ba7d

 # --- Final Stage ---
 # This stage creates the lightweight final image, copying only the
diff --git a/.generator/cli.py b/.generator/cli.py
index 38312754d98f..f52d3019e2d6 100644
--- a/.generator/cli.py
+++ b/.generator/cli.py
@@ -33,7 +33,7 @@
 try:
     import synthtool
-    from synthtool.languages import python_mono_repo
+    from synthtool.languages import python, python_mono_repo

     SYNTHTOOL_INSTALLED = True
     SYNTHTOOL_IMPORT_ERROR = None
@@ -316,20 +316,28 @@ def _get_library_id(request_data: Dict) -> str:
     return library_id


-def _run_post_processor(output: str, library_id: str):
+def _run_post_processor(output: str, library_id: str, is_mono_repo: bool):
     """Runs the synthtool post-processor on the output directory.

     Args:
         output(str): Path to the directory in the container where code
             should be generated.
         library_id(str): The library id to be used for post processing.
-
+        is_mono_repo(bool): True if the current repository is a mono-repo.
     """
     os.chdir(output)
-    path_to_library = f"packages/{library_id}"
+    path_to_library = f"packages/{library_id}" if is_mono_repo else "."
     logger.info("Running Python post-processor...")
     if SYNTHTOOL_INSTALLED:
-        python_mono_repo.owlbot_main(path_to_library)
+        if is_mono_repo:
+            python_mono_repo.owlbot_main(path_to_library)
+        else:
+            # Some repositories have customizations in `owlbot.py`. If this file exists,
+            # run those customizations instead of `owlbot_main`.
+            if Path(f"{output}/owlbot.py").exists():
+                subprocess.run(["python3.14", f"{output}/owlbot.py"])
+            else:
+                python.owlbot_main()
     else:
         raise SYNTHTOOL_IMPORT_ERROR  # pragma: NO COVER
@@ -342,7 +350,9 @@ def _run_post_processor(output: str, library_id: str):
     logger.info("Python post-processor ran successfully.")


-def _copy_files_needed_for_post_processing(output: str, input: str, library_id: str):
+def _copy_files_needed_for_post_processing(
+    output: str, input: str, library_id: str, is_mono_repo: bool
+):
     """Copy files to the output directory which are needed during the post
     processing step, such as .repo-metadata.json and scripts/client-post-processing,
     using the input directory as the source.
@@ -353,25 +363,23 @@ def _copy_files_needed_for_post_processing(output: str, input: str, library_id:
         input(str): The path to the directory in the container which contains
             additional generator input.
         library_id(str): The library id to be used for post processing.
+        is_mono_repo(bool): True if the current repository is a mono-repo.
     """
-    path_to_library = f"packages/{library_id}"
-    repo_metadata_path = f"{input}/{path_to_library}/.repo-metadata.json"
+    path_to_library = f"packages/{library_id}" if is_mono_repo else "."
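+    # In the mono-repo each library lives under packages/<library_id>;
+    # in a single-library repository, generated files live at the repository root.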
+    source_dir = f"{input}/{path_to_library}"
+
+    shutil.copytree(
+        source_dir,
+        output,
+        dirs_exist_ok=True,
+        ignore=shutil.ignore_patterns("client-post-processing"),
+    )

     # We need to create these directories so that we can copy files necessary for post-processing.
     os.makedirs(
         f"{output}/{path_to_library}/scripts/client-post-processing", exist_ok=True
     )
-    # TODO(https://github.com/googleapis/librarian/issues/2334):
-    # if `.repo-metadata.json` for a library exists in
-    # `.librarian/generator-input`, then we override the generated `.repo-metadata.json`
-    # with what we have in `generator-input`. Remove this logic once the
-    # generated `.repo-metadata.json` file is completely backfilled.
-    if os.path.exists(repo_metadata_path):
-        shutil.copy(
-            repo_metadata_path,
-            f"{output}/{path_to_library}/.repo-metadata.json",
-        )

     # copy post-processing files
     for post_processing_file in glob.glob(
@@ -385,7 +393,9 @@ def _copy_files_needed_for_post_processing(output: str, input: str, library_id:
     )


-def _clean_up_files_after_post_processing(output: str, library_id: str):
+def _clean_up_files_after_post_processing(
+    output: str, library_id: str, is_mono_repo: bool
+):
     """
     Clean up files which should not be included in the generated client.
     This function is idempotent and will not fail if files are already removed.
@@ -394,8 +404,9 @@
         output(str): Path to the directory in the container where code
             should be generated.
         library_id(str): The library id to be used for post processing.
+        is_mono_repo(bool): True if the current repository is a mono-repo.
     """
-    path_to_library = f"packages/{library_id}"
+    path_to_library = f"packages/{library_id}" if is_mono_repo else "."

     # Safely remove directories, ignoring errors if they don't exist.
     shutil.rmtree(f"{output}/{path_to_library}/.nox", ignore_errors=True)
@@ -486,7 +497,7 @@ def _create_repo_metadata_from_service_config(


 def _generate_repo_metadata_file(
-    output: str, library_id: str, source: str, apis: List[Dict]
+    output: str, library_id: str, source: str, apis: List[Dict], is_mono_repo: bool
 ):
     """Generates the .repo-metadata.json file from the primary API service config.

@@ -495,8 +506,9 @@
         library_id (str): The ID of the library.
         source (str): The path to the source directory.
         apis (List[Dict]): A list of APIs to generate.
+        is_mono_repo(bool): True if the current repository is a mono-repo.
     """
-    path_to_library = f"packages/{library_id}"
+    path_to_library = f"packages/{library_id}" if is_mono_repo else "."
     output_repo_metadata = f"{output}/{path_to_library}/.repo-metadata.json"

     # TODO(https://github.com/googleapis/librarian/issues/2334): If `.repo-metadata.json`
@@ -523,7 +535,7 @@
     _write_json_file(output_repo_metadata, metadata_content)


-def _copy_readme_to_docs(output: str, library_id: str):
+def _copy_readme_to_docs(output: str, library_id: str, is_mono_repo: bool):
     """Copies the README.rst file for a generated library to docs/README.rst.

     This function is robust against various symlink configurations that could
@@ -536,7 +548,7 @@
         should be generated.
         library_id(str): The library id.
     """
-    path_to_library = f"packages/{library_id}"
+    path_to_library = f"packages/{library_id}" if is_mono_repo else "."
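+    # Both layouts resolve through path_to_library, so the README copy below
+    # works unchanged for the mono-repo and for single-library repositories.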
     source_path = f"{output}/{path_to_library}/README.rst"
     docs_path = f"{output}/{path_to_library}/docs"
     destination_path = f"{docs_path}/README.rst"
@@ -593,6 +605,7 @@ def handle_generate(
     """
     try:
+        is_mono_repo = _is_mono_repo(input)
         # Read a generate-request.json file
         request_data = _read_json_file(f"{librarian}/{GENERATE_REQUEST_FILE}")
         library_id = _get_library_id(request_data)
@@ -601,12 +614,16 @@
         for api in apis_to_generate:
             api_path = api.get("path")
             if api_path:
-                _generate_api(api_path, library_id, source, output, version)
-        _copy_files_needed_for_post_processing(output, input, library_id)
-        _generate_repo_metadata_file(output, library_id, source, apis_to_generate)
-        _run_post_processor(output, library_id)
-        _copy_readme_to_docs(output, library_id)
-        _clean_up_files_after_post_processing(output, library_id)
+                _generate_api(
+                    api_path, library_id, source, output, version, is_mono_repo
+                )
+        _copy_files_needed_for_post_processing(output, input, library_id, is_mono_repo)
+        _generate_repo_metadata_file(
+            output, library_id, source, apis_to_generate, is_mono_repo
+        )
+        _run_post_processor(output, library_id, is_mono_repo)
+        _copy_readme_to_docs(output, library_id, is_mono_repo)
+        _clean_up_files_after_post_processing(output, library_id, is_mono_repo)
     except Exception as e:
         raise ValueError("Generation failed.") from e
     logger.info("'generate' command executed.")
@@ -821,7 +838,12 @@ def _stage_gapic_library(tmp_dir: str, staging_dir: str) -> None:


 def _generate_api(
-    api_path: str, library_id: str, source: str, output: str, gapic_version: str
+    api_path: str,
+    library_id: str,
+    source: str,
+    output: str,
+    gapic_version: str,
+    is_mono_repo: bool,
 ):
     """
     Handles the generation and staging process for a single API path.
@@ -833,6 +855,7 @@
         output (str): Path to the output directory where code should be staged.
         gapic_version(str): The desired version number for the GAPIC client library
             in a format which follows PEP-440.
+        is_mono_repo(bool): True if the current repository is a mono-repo.
     """
     py_gapic_config = _read_bazel_build_py_rule(api_path, source)
     is_proto_only_library = len(py_gapic_config) == 0
@@ -854,9 +877,10 @@
     staging_child_directory = _get_staging_child_directory(
         api_path, is_proto_only_library
     )
-    staging_dir = os.path.join(
-        output, "owl-bot-staging", library_id, staging_child_directory
-    )
+    staging_dir = os.path.join(output, "owl-bot-staging")
+    if is_mono_repo:
+        staging_dir = os.path.join(staging_dir, library_id)
+    staging_dir = os.path.join(staging_dir, staging_child_directory)

     # 4. Stage the generated code
     if is_proto_only_library:
@@ -1464,10 +1488,7 @@ def handle_release_init(
             f"{library_id} version: {previous_version}\n"
         )

-        if is_mono_repo:
-            path_to_library = f"packages/{library_id}"
-        else:
-            path_to_library = "."
+        path_to_library = f"packages/{library_id}" if is_mono_repo else "."
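+        # Version and changelog updates below are applied relative to the library
+        # root, which is the repository root for single-library repositories.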

         _update_version_for_library(repo, output, path_to_library, version)
         _update_changelog_for_library(
diff --git a/.generator/test_cli.py b/.generator/test_cli.py
index 25f209ed9211..d89ec3c6e953 100644
--- a/.generator/test_cli.py
+++ b/.generator/test_cli.py
@@ -456,21 +456,36 @@ def test_get_library_id_empty_id():
         _get_library_id(request_data)


-def test_run_post_processor_success(mocker, caplog):
+@pytest.mark.parametrize(
+    "is_mono_repo,owlbot_py_exists", [(True, False), (False, False), (False, True)]
+)
+def test_run_post_processor_success(mocker, caplog, is_mono_repo, owlbot_py_exists):
     """
     Tests that the post-processor helper calls the correct command.
     """
     caplog.set_level(logging.INFO)
     mocker.patch("cli.SYNTHTOOL_INSTALLED", return_value=True)
     mock_chdir = mocker.patch("cli.os.chdir")
-    mock_owlbot_main = mocker.patch(
-        "cli.synthtool.languages.python_mono_repo.owlbot_main"
+    mocker.patch("pathlib.Path.exists", return_value=owlbot_py_exists)
+    mocker.patch(
+        "cli.subprocess.run", return_value=MagicMock(stdout="ok", stderr="", check=True)
     )
-    _run_post_processor("output", "google-cloud-language")
+
+    if is_mono_repo:
+        mock_owlbot = mocker.patch(
+            "cli.synthtool.languages.python_mono_repo.owlbot_main"
+        )
+    elif not owlbot_py_exists:
+        mock_owlbot = mocker.patch("cli.synthtool.languages.python.owlbot_main")
+    _run_post_processor("output", "google-cloud-language", is_mono_repo)

     mock_chdir.assert_called_once()
-    mock_owlbot_main.assert_called_once_with("packages/google-cloud-language")
+    if is_mono_repo:
+        mock_owlbot.assert_called_once_with("packages/google-cloud-language")
+    elif not owlbot_py_exists:
+        mock_owlbot.assert_called_once_with()
+
     assert "Python post-processor ran successfully." in caplog.text

@@ -602,7 +617,8 @@ def test_run_protoc_command_failure(mocker):
         _run_protoc_command(command, source)


-def test_generate_api_success_py_gapic(mocker, caplog):
+@pytest.mark.parametrize("is_mono_repo", [False, True])
+def test_generate_api_success_py_gapic(mocker, caplog, is_mono_repo):
     caplog.set_level(logging.INFO)

     API_PATH = "google/cloud/language/v1"
@@ -622,14 +638,15 @@
     mock_run_protoc_command = mocker.patch("cli._run_protoc_command")
     mock_shutil_copytree = mocker.patch("shutil.copytree")

-    _generate_api(API_PATH, LIBRARY_ID, SOURCE, OUTPUT, gapic_version)
+    _generate_api(API_PATH, LIBRARY_ID, SOURCE, OUTPUT, gapic_version, is_mono_repo)

     mock_read_bazel_build_py_rule.assert_called_once()
     mock_run_protoc_command.assert_called_once()
     mock_shutil_copytree.assert_called_once()


-def test_generate_api_success_py_proto(mocker, caplog):
+@pytest.mark.parametrize("is_mono_repo", [False, True])
+def test_generate_api_success_py_proto(mocker, caplog, is_mono_repo):
     caplog.set_level(logging.INFO)

     API_PATH = "google/cloud/language/v1"
@@ -644,15 +661,16 @@
     mock_run_protoc_command = mocker.patch("cli._run_protoc_command")
     mock_shutil_copytree = mocker.patch("shutil.copytree")

-    _generate_api(API_PATH, LIBRARY_ID, SOURCE, OUTPUT, gapic_version)
+    _generate_api(API_PATH, LIBRARY_ID, SOURCE, OUTPUT, gapic_version, is_mono_repo)

     mock_read_bazel_build_py_rule.assert_called_once()
     mock_run_protoc_command.assert_called_once()
     mock_shutil_copytree.assert_called_once()


+@pytest.mark.parametrize("is_mono_repo", [False, True])
 def test_handle_generate_success(
-    caplog, mock_generate_request_file, mock_build_bazel_file, mocker
+    caplog, mock_generate_request_file, mock_build_bazel_file, mocker, is_mono_repo
 ):
     """
     Tests the successful execution path of handle_generate.
     """
@@ -668,15 +686,18 @@
         "cli._clean_up_files_after_post_processing"
     )
     mocker.patch("cli._generate_repo_metadata_file")
+    mocker.patch("pathlib.Path.exists", return_value=is_mono_repo)

     handle_generate()

-    mock_run_post_processor.assert_called_once_with("output", "google-cloud-language")
+    mock_run_post_processor.assert_called_once_with(
+        "output", "google-cloud-language", is_mono_repo
+    )
     mock_copy_files_needed_for_post_processing.assert_called_once_with(
-        "output", "input", "google-cloud-language"
+        "output", "input", "google-cloud-language", is_mono_repo
     )
     mock_clean_up_files_after_post_processing.assert_called_once_with(
-        "output", "google-cloud-language"
+        "output", "google-cloud-language", is_mono_repo
     )
     mock_generate_api.assert_called_once()

@@ -831,34 +852,27 @@ def test_invalid_json(mocker):
         _read_json_file("fake/path.json")


-def test_copy_files_needed_for_post_processing_copies_metadata_if_exists(mocker):
-    """Tests that .repo-metadata.json is copied if it exists."""
+@pytest.mark.parametrize("is_mono_repo", [False, True])
+def test_copy_files_needed_for_post_processing_copies_files_from_generator_input(
+    mocker, is_mono_repo
+):
+    """Tests that files needed for post-processing are copied from generator-input."""
     mock_makedirs = mocker.patch("os.makedirs")
-    mock_shutil_copy = mocker.patch("shutil.copy")
-    mocker.patch("os.path.exists", return_value=True)
-
-    _copy_files_needed_for_post_processing("output", "input", "library_id")
-
-    mock_shutil_copy.assert_called_once()
-    mock_makedirs.assert_called()
-
-
-def test_copy_files_needed_for_post_processing_skips_metadata_if_not_exists(mocker):
-    """Tests that .repo-metadata.json is not copied if it does not exist."""
-    mock_makedirs = mocker.patch("os.makedirs")
-    mock_shutil_copy = mocker.patch("shutil.copy")
-    mocker.patch("os.path.exists", return_value=False)
+    mock_shutil_copytree = mocker.patch("shutil.copytree")

-    _copy_files_needed_for_post_processing("output", "input", "library_id")
+    _copy_files_needed_for_post_processing(
+        "output", "input", "library_id", is_mono_repo
+    )

-    mock_shutil_copy.assert_not_called()
+    mock_shutil_copytree.assert_called()
     mock_makedirs.assert_called()


-def test_clean_up_files_after_post_processing_success(mocker):
+@pytest.mark.parametrize("is_mono_repo", [False, True])
+def test_clean_up_files_after_post_processing_success(mocker, is_mono_repo):
     mock_shutil_rmtree = mocker.patch("shutil.rmtree")
     mock_os_remove = mocker.patch("os.remove")

-    _clean_up_files_after_post_processing("output", "library_id")
+    _clean_up_files_after_post_processing("output", "library_id", is_mono_repo)


 def test_get_libraries_to_prepare_for_release(mock_release_init_request_file):
@@ -1377,7 +1391,8 @@ def test_create_repo_metadata_from_service_config(mocker):
     assert metadata["default_version"] == "v1"


-def test_generate_repo_metadata_file(mocker):
+@pytest.mark.parametrize("is_mono_repo", [False, True])
+def test_generate_repo_metadata_file(mocker, is_mono_repo):
     """Tests the generation of the .repo-metadata.json file."""
     mock_write_json = mocker.patch("cli._write_json_file")
     mock_create_metadata = mocker.patch(
@@ -1396,24 +1411,26 @@
         }
     ]

-    _generate_repo_metadata_file(output, library_id, source, apis)
+    _generate_repo_metadata_file(output, library_id, source, apis, is_mono_repo)

     mock_create_metadata.assert_called_once_with(
         "service_config.yaml", "google/cloud/language/v1", source, library_id
     )
+    path_to_library = f"packages/{library_id}" if is_mono_repo else "."
     mock_write_json.assert_called_once_with(
-        f"{output}/packages/{library_id}/.repo-metadata.json",
+        f"{output}/{path_to_library}/.repo-metadata.json",
         {"repo": "googleapis/google-cloud-python"},
     )


-def test_generate_repo_metadata_file_skips_if_exists(mocker):
+@pytest.mark.parametrize("is_mono_repo", [False, True])
+def test_generate_repo_metadata_file_skips_if_exists(mocker, is_mono_repo):
     """Tests that the generation of the .repo-metadata.json file is skipped if it already exists."""
     mock_write_json = mocker.patch("cli._write_json_file")
     mock_create_metadata = mocker.patch("cli._create_repo_metadata_from_service_config")
     mocker.patch("os.path.exists", return_value=True)

-    _generate_repo_metadata_file("output", "library_id", "source", [])
+    _generate_repo_metadata_file("output", "library_id", "source", [], is_mono_repo)

     mock_create_metadata.assert_not_called()
     mock_write_json.assert_not_called()
@@ -1616,7 +1633,8 @@ def test_stage_gapic_library(mocker):
     )


-def test_copy_readme_to_docs(mocker):
+@pytest.mark.parametrize("is_mono_repo", [False, True])
+def test_copy_readme_to_docs(mocker, is_mono_repo):
     """Tests that the README.rst is copied to the docs directory, handling symlinks."""
     mock_makedirs = mocker.patch("os.makedirs")
     mock_shutil_copy = mocker.patch("shutil.copy")
@@ -1629,11 +1647,12 @@
     output = "output"
     library_id = "google-cloud-language"

-    _copy_readme_to_docs(output, library_id)
+    _copy_readme_to_docs(output, library_id, is_mono_repo)

-    expected_source = "output/packages/google-cloud-language/README.rst"
-    expected_docs_path = "output/packages/google-cloud-language/docs"
-    expected_destination = "output/packages/google-cloud-language/docs/README.rst"
+    path_to_library = f"packages/{library_id}" if is_mono_repo else "."
+    expected_source = f"output/{path_to_library}/README.rst"
+    expected_docs_path = f"output/{path_to_library}/docs"
+    expected_destination = f"output/{path_to_library}/docs/README.rst"

     mock_os_lexists.assert_called_once_with(expected_source)
     mock_open.assert_any_call(expected_source, "r")
@@ -1645,7 +1664,8 @@
     mock_open().write.assert_called_once_with("dummy content")


-def test_copy_readme_to_docs_handles_symlink(mocker):
+@pytest.mark.parametrize("is_mono_repo", [False, True])
+def test_copy_readme_to_docs_handles_symlink(mocker, is_mono_repo):
     """Tests that the README.rst is copied to the docs directory, handling symlinks."""
     mock_makedirs = mocker.patch("os.makedirs")
     mock_shutil_copy = mocker.patch("shutil.copy")
@@ -1664,11 +1684,12 @@
     output = "output"
     library_id = "google-cloud-language"

-    _copy_readme_to_docs(output, library_id)
+    _copy_readme_to_docs(output, library_id, is_mono_repo)

-    expected_source = "output/packages/google-cloud-language/README.rst"
-    expected_docs_path = "output/packages/google-cloud-language/docs"
-    expected_destination = "output/packages/google-cloud-language/docs/README.rst"
+    path_to_library = f"packages/{library_id}" if is_mono_repo else "."
+    expected_source = f"output/{path_to_library}/README.rst"
+    expected_docs_path = f"output/{path_to_library}/docs"
+    expected_destination = f"output/{path_to_library}/docs/README.rst"

     mock_os_lexists.assert_called_once_with(expected_source)
     mock_open.assert_any_call(expected_source, "r")
@@ -1680,7 +1701,8 @@
     mock_open().write.assert_called_once_with("dummy content")


-def test_copy_readme_to_docs_destination_path_is_symlink(mocker):
+@pytest.mark.parametrize("is_mono_repo", [False, True])
+def test_copy_readme_to_docs_destination_path_is_symlink(mocker, is_mono_repo):
     """Tests that the README.rst is copied to the docs directory, handling destination_path being a symlink."""
     mock_makedirs = mocker.patch("os.makedirs")
     mock_shutil_copy = mocker.patch("shutil.copy")
@@ -1693,14 +1715,17 @@
     output = "output"
     library_id = "google-cloud-language"

-    _copy_readme_to_docs(output, library_id)
+    _copy_readme_to_docs(output, library_id, is_mono_repo)

-    expected_destination = "output/packages/google-cloud-language/docs/README.rst"
+    path_to_library = f"packages/{library_id}" if is_mono_repo else "."
+    expected_destination = f"output/{path_to_library}/docs/README.rst"

     mock_os_remove.assert_called_once_with(expected_destination)


-def test_copy_readme_to_docs_source_not_exists(mocker):
+@pytest.mark.parametrize("is_mono_repo", [False, True])
+def test_copy_readme_to_docs_source_not_exists(mocker, is_mono_repo):
     """Tests that the function returns early if the source README.rst does not exist."""
+
     mock_makedirs = mocker.patch("os.makedirs")
     mock_shutil_copy = mocker.patch("shutil.copy")
     mock_os_islink = mocker.patch("os.path.islink")
@@ -1712,9 +1737,10 @@
     output = "output"
     library_id = "google-cloud-language"

-    _copy_readme_to_docs(output, library_id)
+    _copy_readme_to_docs(output, library_id, is_mono_repo)

-    expected_source = "output/packages/google-cloud-language/README.rst"
+    path_to_library = f"packages/{library_id}" if is_mono_repo else "."
+    expected_source = f"output/{path_to_library}/README.rst"

     mock_os_lexists.assert_called_once_with(expected_source)
     mock_open.assert_not_called()
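
A note on the helper this patch leans on: handle_generate now calls _is_mono_repo(input), whose implementation is outside this diff (the tests fake its result by patching pathlib.Path.exists). As a minimal sketch of the layout convention the patch assumes, using hypothetical names rather than the repository's actual code:

    from pathlib import Path


    def is_mono_repo_sketch(repo_root: str) -> bool:
        # Hypothetical check: assume a mono-repo keeps its libraries under packages/.
        return Path(repo_root, "packages").is_dir()


    def library_root(library_id: str, is_mono_repo: bool) -> str:
        # Mirrors the convention used throughout the patch: packages/<library_id>
        # in the mono-repo, the repository root otherwise.
        return f"packages/{library_id}" if is_mono_repo else "."

For example, library_root("google-cloud-language", True) yields "packages/google-cloud-language", the same library root the parametrized tests above assert for the mono-repo case.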