Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
57 changes: 45 additions & 12 deletions .generator/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,6 +223,24 @@ def _prepare_new_library_config(library_config: Dict) -> Dict:
return library_config


def _create_new_changelog_for_library(library_id: str, output: str):
    """Seeds fresh CHANGELOG.md files for a newly configured library.

    Writes an empty "# Changelog" header both at the package root and in
    the docs directory, creating parent directories as needed.

    Args:
        library_id(str): The id of the library.
        output(str): Path to the directory in the container where code
            should be generated.
    """
    changelog_paths = (
        f"{output}/packages/{library_id}/CHANGELOG.md",
        f"{output}/packages/{library_id}/docs/CHANGELOG.md",
    )
    for changelog_path in changelog_paths:
        # Ensure the parent directory exists before writing the file.
        os.makedirs(os.path.dirname(changelog_path), exist_ok=True)
        _write_text_file(changelog_path, "# Changelog\n")


def handle_configure(
librarian: str = LIBRARIAN_DIR,
source: str = SOURCE_DIR,
Expand Down Expand Up @@ -267,6 +285,9 @@ def handle_configure(
)
prepared_config = _prepare_new_library_config(new_library_config)

library_id = _get_library_id(prepared_config)
_create_new_changelog_for_library(library_id, output)

# Write the new library configuration to configure-response.json.
_write_json_file(f"{librarian}/configure-response.json", prepared_config)

Expand Down Expand Up @@ -379,8 +400,6 @@ def _clean_up_files_after_post_processing(output: str, library_id: str):
shutil.rmtree(f"{output}/owl-bot-staging", ignore_errors=True)

# Safely remove specific files if they exist using pathlib.
Path(f"{output}/{path_to_library}/CHANGELOG.md").unlink(missing_ok=True)
Path(f"{output}/{path_to_library}/docs/CHANGELOG.md").unlink(missing_ok=True)

# The glob loops are already safe, as they do nothing if no files match.
for post_processing_file in glob.glob(
Expand Down Expand Up @@ -500,8 +519,8 @@ def _generate_repo_metadata_file(
_write_json_file(output_repo_metadata, metadata_content)


def _copy_readme_to_docs(output: str, library_id: str):
"""Copies the README.rst file for a generated library to docs/README.rst.
def _copy_file_to_docs(output: str, library_id: str, filename: str):
"""Copies a file for a generated library to the docs directory.

This function is robust against various symlink configurations that could
cause `shutil.copy` to fail with a `SameFileError`. It reads the content
Expand All @@ -512,20 +531,20 @@ def _copy_readme_to_docs(output: str, library_id: str):
output(str): Path to the directory in the container where code
should be generated.
library_id(str): The library id.
filename(str): The name of the file to copy.
"""
path_to_library = f"packages/{library_id}"
source_path = f"{output}/{path_to_library}/README.rst"
    source_path = f"{output}/{path_to_library}/{filename}"
docs_path = f"{output}/{path_to_library}/docs"
destination_path = f"{docs_path}/README.rst"
    destination_path = f"{docs_path}/{filename}"


# If the source file doesn't exist (not even as a broken symlink),
# there's nothing to copy.
if not os.path.lexists(source_path):
return

# Read the content from the source, which will resolve any symlinks.
with open(source_path, "r") as f:
content = f.read()
content = _read_text_file(source_path)

# Remove any symlinks at the destination to prevent errors.
if os.path.islink(destination_path):
Expand All @@ -535,17 +554,30 @@ def _copy_readme_to_docs(output: str, library_id: str):

# Ensure the destination directory exists as a real directory.
os.makedirs(docs_path, exist_ok=True)
_write_text_file(destination_path, content)


def _copy_readme_to_docs(output: str, library_id: str):
    """Copies a generated library's README.rst into its docs directory.

    Thin convenience wrapper delegating to `_copy_file_to_docs` with the
    README.rst filename.

    Args:
        output(str): Path to the directory in the container where code
            should be generated.
        library_id(str): The library id.
    """
    _copy_file_to_docs(output, library_id, "README.rst")


# Write the content to the destination, creating a new physical file.
with open(destination_path, "w") as f:
f.write(content)


def handle_generate(
librarian: str = LIBRARIAN_DIR,
source: str = SOURCE_DIR,
output: str = OUTPUT_DIR,
input: str = INPUT_DIR,
repo: str = REPO_DIR,
):
"""The main coordinator for the code generation process.

Expand Down Expand Up @@ -1522,6 +1554,7 @@ def handle_release_init(
source=args.source,
output=args.output,
input=args.input,
repo=args.repo,
)
elif args.command == "build":
args.func(librarian=args.librarian, repo=args.repo)
Expand Down
99 changes: 63 additions & 36 deletions .generator/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
_clean_up_files_after_post_processing,
_copy_files_needed_for_post_processing,
_create_main_version_header,
_create_new_changelog_for_library,
_create_repo_metadata_from_service_config,
_determine_generator_command,
_determine_library_namespace,
Expand Down Expand Up @@ -71,6 +72,7 @@
_write_json_file,
_write_text_file,
_copy_readme_to_docs,
_copy_file_to_docs,
handle_build,
handle_configure,
handle_generate,
Expand Down Expand Up @@ -291,17 +293,45 @@ def test_handle_configure_success(mock_configure_request_file, mocker):
mocker.patch("cli._update_global_changelog", return_value=None)
mock_write_json = mocker.patch("cli._write_json_file")
mock_prepare_config = mocker.patch(
"cli._prepare_new_library_config", return_value={"id": "prepared"}
"cli._prepare_new_library_config",
return_value={"id": "google-cloud-language"},
)
mock_create_changelog = mocker.patch("cli._create_new_changelog_for_library")

handle_configure()
handle_configure(output="output")

mock_prepare_config.assert_called_once()
mock_create_changelog.assert_called_once_with("google-cloud-language", "output")
mock_write_json.assert_called_once_with(
f"{LIBRARIAN_DIR}/configure-response.json", {"id": "prepared"}
f"{LIBRARIAN_DIR}/configure-response.json",
{"id": "google-cloud-language"},
)


def test_create_new_changelog_for_library(mocker):
    """Verifies both changelog files are created with the expected header."""
    mock_makedirs = mocker.patch("os.makedirs")
    mock_write_text_file = mocker.patch("cli._write_text_file")

    _create_new_changelog_for_library("google-cloud-language", "output")

    expected_paths = [
        "output/packages/google-cloud-language/CHANGELOG.md",
        "output/packages/google-cloud-language/docs/CHANGELOG.md",
    ]
    for expected_path in expected_paths:
        # Parent directory must be created, then the header written.
        mock_makedirs.assert_any_call(
            os.path.dirname(expected_path), exist_ok=True
        )
        mock_write_text_file.assert_any_call(expected_path, "# Changelog\n")

    # Exactly one directory creation and one write per changelog file.
    assert mock_makedirs.call_count == 2
    assert mock_write_text_file.call_count == 2



def test_handle_configure_no_new_library(mocker):
"""Tests that handle_configure fails if no new library is found."""
mocker.patch("cli._read_json_file", return_value={"libraries": []})
Expand Down Expand Up @@ -640,6 +670,7 @@ def test_handle_generate_success(
"cli._clean_up_files_after_post_processing"
)
mocker.patch("cli._generate_repo_metadata_file")
mocker.patch("cli._copy_readme_to_docs")

handle_generate()

Expand Down Expand Up @@ -1572,44 +1603,12 @@ def test_copy_readme_to_docs(mocker):
mock_os_islink.assert_any_call(expected_destination)
mock_os_islink.assert_any_call(expected_docs_path)
mock_os_remove.assert_not_called()
mock_makedirs.assert_called_once_with(expected_docs_path, exist_ok=True)
mock_makedirs.assert_called_with(Path(expected_docs_path), exist_ok=True)
mock_open.assert_any_call(expected_destination, "w")
mock_open().write.assert_called_once_with("dummy content")


def test_copy_readme_to_docs_handles_symlink(mocker):
"""Tests that the README.rst is copied to the docs directory, handling symlinks."""
mock_makedirs = mocker.patch("os.makedirs")
mock_shutil_copy = mocker.patch("shutil.copy")
mock_os_islink = mocker.patch("os.path.islink")
mock_os_remove = mocker.patch("os.remove")
mock_os_lexists = mocker.patch("os.path.lexists", return_value=True)
mock_open = mocker.patch(
"builtins.open", mocker.mock_open(read_data="dummy content")
)

# Simulate docs_path being a symlink
mock_os_islink.side_effect = [
False,
True,
] # First call for destination_path, second for docs_path

output = "output"
library_id = "google-cloud-language"
_copy_readme_to_docs(output, library_id)

expected_source = "output/packages/google-cloud-language/README.rst"
expected_docs_path = "output/packages/google-cloud-language/docs"
expected_destination = "output/packages/google-cloud-language/docs/README.rst"

mock_os_lexists.assert_called_once_with(expected_source)
mock_open.assert_any_call(expected_source, "r")
mock_os_islink.assert_any_call(expected_destination)
mock_os_islink.assert_any_call(expected_docs_path)
mock_os_remove.assert_called_once_with(expected_docs_path)
mock_makedirs.assert_called_once_with(expected_docs_path, exist_ok=True)
mock_open.assert_any_call(expected_destination, "w")
mock_open().write.assert_called_once_with("dummy content")


def test_copy_readme_to_docs_destination_path_is_symlink(mocker):
Expand Down Expand Up @@ -1654,3 +1653,31 @@ def test_copy_readme_to_docs_source_not_exists(mocker):
mock_os_remove.assert_not_called()
mock_makedirs.assert_not_called()
mock_shutil_copy.assert_not_called()


def test_copy_file_to_docs_docs_path_is_symlink(mocker):
    """Verifies a symlinked docs directory is removed before the copy."""
    mocker.patch("os.makedirs")
    mock_os_remove = mocker.patch("os.remove")
    mocker.patch("os.path.lexists", return_value=True)
    mocker.patch(
        "builtins.open", mocker.mock_open(read_data="dummy content")
    )

    docs_path = "output/packages/google-cloud-language/docs"
    # Only the docs directory itself reports as a symlink; the
    # destination file does not.
    mocker.patch("os.path.islink", side_effect=lambda path: path == docs_path)

    _copy_file_to_docs("output", "google-cloud-language", "README.rst")

    # The symlinked docs directory must be unlinked exactly once.
    mock_os_remove.assert_called_once_with(docs_path)


21 changes: 20 additions & 1 deletion .librarian/state.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator:latest
image: python-librarian-generator:latest
libraries:
- id: google-ads-admanager
version: 0.5.0
Expand Down Expand Up @@ -2092,6 +2092,25 @@ libraries:
remove_regex:
- packages/google-cloud-gke-multicloud
tag_format: '{id}-v{version}'
- id: google-cloud-gkerecommender
version: 0.0.0
last_generated_commit: 1b5c44879f3281d05731a0bf3fc0345ff4463eed
apis:
- path: google/cloud/gkerecommender/v1
service_config: gkerecommender_v1.yaml
source_roots:
- packages/google-cloud-gkerecommender
preserve_regex:
- packages/google-cloud-gkerecommender/CHANGELOG.md
- docs/CHANGELOG.md
- docs/README.rst
- samples/README.txt
- scripts/client-post-processing
- samples/snippets/README.rst
- tests/system
remove_regex:
- packages/google-cloud-gkerecommender
tag_format: '{id}-v{version}'
- id: google-cloud-gsuiteaddons
version: 0.3.18
last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd
Expand Down
13 changes: 13 additions & 0 deletions packages/google-cloud-gkerecommender/.coveragerc
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
[run]
branch = True

[report]
show_missing = True
omit =
google/cloud/gkerecommender/__init__.py
google/cloud/gkerecommender/gapic_version.py
exclude_lines =
# Re-enable the standard pragma
pragma: NO COVER
# Ignore debug-only repr
def __repr__
34 changes: 34 additions & 0 deletions packages/google-cloud-gkerecommender/.flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
[flake8]
# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
# Resolve flake8 lint issues
ignore = E203, E231, E266, E501, W503
exclude =
# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
# Ensure that generated code passes flake8 lint
**/gapic/**
**/services/**
**/types/**
# Exclude Protobuf gencode
*_pb2.py

# Standard linting exemptions.
**/.nox/**
__pycache__,
.git,
*.pyc,
conf.py
16 changes: 16 additions & 0 deletions packages/google-cloud-gkerecommender/.repo-metadata.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
{
"name": "google-cloud-gkerecommender",
"name_pretty": "GKE Recommender API",
"api_description": "GKE Recommender API",
"product_documentation": "https://cloud.google.com/kubernetes-engine/docs/how-to/machine-learning/inference-quickstart",
"client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-gkerecommender/latest",
"issue_tracker": "https://issuetracker.google.com/issues/new?component=1790908",
"release_level": "preview",
"language": "python",
"library_type": "GAPIC_AUTO",
"repo": "googleapis/google-cloud-python",
"distribution_name": "google-cloud-gkerecommender",
"api_id": "gkerecommender.googleapis.com",
"default_version": "v1",
"api_shortname": "gkerecommender"
}
1 change: 1 addition & 0 deletions packages/google-cloud-gkerecommender/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Changelog
Loading
Loading