Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
80 changes: 62 additions & 18 deletions .generator/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -228,7 +228,7 @@ def handle_configure(
source: str = SOURCE_DIR,
repo: str = REPO_DIR,
input: str = INPUT_DIR,
output: str = OUTPUT_DIR
output: str = OUTPUT_DIR,
):
"""Onboards a new library by completing its configuration.

Expand Down Expand Up @@ -259,7 +259,7 @@ def handle_configure(
# configure-request.json contains the library definitions.
request_data = _read_json_file(f"{librarian}/{CONFIGURE_REQUEST_FILE}")
new_library_config = _get_new_library_config(request_data)

_update_global_changelog(
f"{repo}/CHANGELOG.md",
f"{output}/CHANGELOG.md",
Expand Down Expand Up @@ -1109,7 +1109,9 @@ def _process_version_file(content, version, version_path) -> str:

Returns: A string with the modified content.
"""
if version_path.name.endswith("gapic_version.py"):
if version_path.name.endswith("gapic_version.py") or version_path.name.endswith(
"version.py"
):
pattern = r"(__version__\s*=\s*[\"'])([^\"']+)([\"'].*)"
else:
pattern = r"(version\s*=\s*[\"'])([^\"']+)([\"'].*)"
Expand All @@ -1125,7 +1127,7 @@ def _process_version_file(content, version, version_path) -> str:
def _update_version_for_library(
repo: str, output: str, path_to_library: str, version: str
):
"""Updates the version string in `**/gapic_version.py`, `setup.py`,
"""Updates the version string in `**/gapic_version.py`, `**/version.py`, `setup.py`,
`pyproject.toml` and `samples/**/snippet_metadata.json` for a
given library, if applicable.

Expand All @@ -1139,12 +1141,31 @@ def _update_version_for_library(
version(str): The new version of the library

Raises: `ValueError` if a version string could not be located in `**/gapic_version.py`
within the given library.
or `**/version.py` within the given library.
"""

# Find and update gapic_version.py files
version_files = list(Path(f"{repo}/{path_to_library}").rglob("**/gapic_version.py"))
if len(version_files) == 0:
# Find and update version.py or gapic_version.py files
search_base = Path(f"{repo}/{path_to_library}")
version_files = list(search_base.rglob("**/gapic_version.py"))
excluded_dirs = {
".nox",
".venv",
"venv",
"site-packages",
".git",
"build",
"dist",
"__pycache__",
}
version_files.extend(
[
p
for p in search_base.rglob("**/version.py")
if not any(part in excluded_dirs for part in p.parts)
]
)

if not version_files:
# Fallback to `pyproject.toml`` or `setup.py``. Proto-only libraries have
# version information in `setup.py` or `pyproject.toml` instead of `gapic_version.py`.
pyproject_toml = Path(f"{repo}/{path_to_library}/pyproject.toml")
Expand All @@ -1160,7 +1181,7 @@ def _update_version_for_library(

# Find and update snippet_metadata.json files
snippet_metadata_files = Path(f"{repo}/{path_to_library}").rglob(
"samples/**/*.json"
"samples/**/*snippet*.json"
)
for metadata_file in snippet_metadata_files:
output_path = f"{output}/{metadata_file.relative_to(repo)}"
Expand Down Expand Up @@ -1300,6 +1321,7 @@ def _update_changelog_for_library(
version: str,
previous_version: str,
library_id: str,
relative_path: str,
):
"""Prepends a new release entry with multiple, grouped changes, to a changelog.

Expand All @@ -1316,8 +1338,6 @@ def _update_changelog_for_library(
library_id(str): The id of the library where the changelog should
be updated.
"""

relative_path = f"packages/{library_id}/CHANGELOG.md"
changelog_src = f"{repo}/{relative_path}"
changelog_dest = f"{output}/{relative_path}"
updated_content = _process_changelog(
Expand All @@ -1330,6 +1350,19 @@ def _update_changelog_for_library(
_write_text_file(changelog_dest, updated_content)


def _is_mono_repo(repo: str) -> bool:
"""Determines if a library is generated or handwritten.

Args:
repo(str): This directory will contain all directories that make up a
library, the .librarian folder, and any global file declared in
the config.yaml.

Returns: True if the library is generated, False otherwise.
"""
return Path(f"{repo}/packages").exists()


def handle_release_init(
librarian: str = LIBRARIAN_DIR, repo: str = REPO_DIR, output: str = OUTPUT_DIR
):
Expand Down Expand Up @@ -1357,27 +1390,30 @@ def handle_release_init(
`release-init-request.json` file in the given
librarian directory cannot be read.
"""

try:
is_mono_repo = _is_mono_repo(repo)

# Read a release-init-request.json file
request_data = _read_json_file(f"{librarian}/{RELEASE_INIT_REQUEST_FILE}")
libraries_to_prep_for_release = _get_libraries_to_prepare_for_release(
request_data
)

_update_global_changelog(
f"{repo}/CHANGELOG.md",
f"{output}/CHANGELOG.md",
libraries_to_prep_for_release,
)
if is_mono_repo:

# only a mono repo has a global changelog
_update_global_changelog(
f"{repo}/CHANGELOG.md",
f"{output}/CHANGELOG.md",
libraries_to_prep_for_release,
)

# Prepare the release for each library by updating the
# library specific version files and library specific changelog.
for library_release_data in libraries_to_prep_for_release:
version = library_release_data["version"]
library_id = library_release_data["id"]
library_changes = library_release_data["changes"]
path_to_library = f"packages/{library_id}"

# Get previous version from state.yaml
previous_version = _get_previous_version(library_id, librarian)
Expand All @@ -1387,6 +1423,13 @@ def handle_release_init(
f"{library_id} version: {previous_version}\n"
)

if is_mono_repo:
path_to_library = f"packages/{library_id}"
changelog_relative_path = f"packages/{library_id}/CHANGELOG.md"
else:
path_to_library = "."
changelog_relative_path = "CHANGELOG.md"

_update_version_for_library(repo, output, path_to_library, version)
_update_changelog_for_library(
repo,
Expand All @@ -1395,6 +1438,7 @@ def handle_release_init(
version,
previous_version,
library_id,
relative_path=changelog_relative_path,
)

except Exception as e:
Expand Down
1 change: 1 addition & 0 deletions .generator/parse_googleapis_content.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,7 @@
"glob",
)


def parse_content(content: str) -> dict:
"""Parses content from BUILD.bazel and returns a dictionary
containing bazel rules and arguments.
Expand Down
66 changes: 54 additions & 12 deletions .generator/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -856,6 +856,24 @@ def test_handle_release_init_success(mocker, mock_release_init_request_file):
handle_release_init()


def test_handle_release_init_is_generated_success(
    mocker, mock_release_init_request_file
):
    """
    Tests that `handle_release_init` invokes `_update_global_changelog` when
    the repo contains a `packages` directory (i.e. it is a mono repo).
    """
    # Pretend the `packages` directory exists so the repo looks like a mono repo.
    mocker.patch("pathlib.Path.exists", return_value=True)
    # Stub out the per-library release steps; only the global changelog matters here.
    mocker.patch("cli._get_previous_version", return_value="1.2.2")
    mocker.patch("cli._update_version_for_library")
    mocker.patch("cli._update_changelog_for_library")
    global_changelog_mock = mocker.patch("cli._update_global_changelog")

    handle_release_init()

    global_changelog_mock.assert_called_once()


def test_handle_release_init_fail_value_error_file():
"""
Tests that handle_release_init fails to read `librarian/release-init-request.json`.
Expand Down Expand Up @@ -970,9 +988,12 @@ def test_update_global_changelog(mocker, mock_release_init_request_file):
def test_update_version_for_library_success_gapic(mocker):
m = mock_open()

mock_rglob = mocker.patch(
"pathlib.Path.rglob", return_value=[pathlib.Path("repo/gapic_version.py")]
)
mock_rglob = mocker.patch("pathlib.Path.rglob")
mock_rglob.side_effect = [
[pathlib.Path("repo/gapic_version.py")], # 1st call (gapic_version.py)
[], # 2nd call (version.py)
[pathlib.Path("repo/samples/snippet_metadata.json")], # 3rd call (snippets)
]
mock_shutil_copy = mocker.patch("shutil.copy")
mock_content = '__version__ = "1.2.2"'
mock_json_metadata = {"clientLibrary": {"version": "0.1.0"}}
Expand Down Expand Up @@ -1002,7 +1023,11 @@ def test_update_version_for_library_success_proto_only_setup_py(mocker):
m = mock_open()

mock_rglob = mocker.patch("pathlib.Path.rglob")
mock_rglob.side_effect = [[], [pathlib.Path("repo/setup.py")]]
mock_rglob.side_effect = [
[],
[pathlib.Path("repo/setup.py")],
[pathlib.Path("repo/samples/snippet_metadata.json")],
]
mock_shutil_copy = mocker.patch("shutil.copy")
mock_content = 'version = "1.2.2"'
mock_json_metadata = {"clientLibrary": {"version": "0.1.0"}}
Expand All @@ -1028,12 +1053,16 @@ def test_update_version_for_library_success_proto_only_setup_py(mocker):
)


def test_update_version_for_library_success_proto_only_py_project_toml(mocker):
def test_update_version_for_library_success_proto_only_pyproject_toml(mocker):
m = mock_open()

mock_path_exists = mocker.patch("pathlib.Path.exists")
mock_path_exists = mocker.patch("pathlib.Path.exists", return_value=True)
mock_rglob = mocker.patch("pathlib.Path.rglob")
mock_rglob.side_effect = [[], [pathlib.Path("repo/pyproject.toml")]]
mock_rglob.side_effect = [
[], # gapic_version.py
[], # version.py
[pathlib.Path("repo/samples/snippet_metadata.json")],
]
mock_shutil_copy = mocker.patch("shutil.copy")
mock_content = 'version = "1.2.2"'
mock_json_metadata = {"clientLibrary": {"version": "0.1.0"}}
Expand Down Expand Up @@ -1108,6 +1137,7 @@ def test_update_changelog_for_library_success(mocker):
"1.2.3",
"1.2.2",
"google-cloud-language",
"CHANGELOG.md",
)


Expand Down Expand Up @@ -1157,6 +1187,7 @@ def test_update_changelog_for_library_failure(mocker):
"1.2.3",
"1.2.2",
"google-cloud-language",
"CHANGELOG.md",
)


Expand Down Expand Up @@ -1524,7 +1555,9 @@ def test_copy_readme_to_docs(mocker):
mock_os_islink = mocker.patch("os.path.islink", return_value=False)
mock_os_remove = mocker.patch("os.remove")
mock_os_lexists = mocker.patch("os.path.lexists", return_value=True)
mock_open = mocker.patch("builtins.open", mocker.mock_open(read_data="dummy content"))
mock_open = mocker.patch(
"builtins.open", mocker.mock_open(read_data="dummy content")
)

output = "output"
library_id = "google-cloud-language"
Expand All @@ -1551,10 +1584,15 @@ def test_copy_readme_to_docs_handles_symlink(mocker):
mock_os_islink = mocker.patch("os.path.islink")
mock_os_remove = mocker.patch("os.remove")
mock_os_lexists = mocker.patch("os.path.lexists", return_value=True)
mock_open = mocker.patch("builtins.open", mocker.mock_open(read_data="dummy content"))
mock_open = mocker.patch(
"builtins.open", mocker.mock_open(read_data="dummy content")
)

# Simulate docs_path being a symlink
mock_os_islink.side_effect = [False, True] # First call for destination_path, second for docs_path
mock_os_islink.side_effect = [
False,
True,
] # First call for destination_path, second for docs_path

output = "output"
library_id = "google-cloud-language"
Expand All @@ -1581,7 +1619,9 @@ def test_copy_readme_to_docs_destination_path_is_symlink(mocker):
mock_os_islink = mocker.patch("os.path.islink", return_value=True)
mock_os_remove = mocker.patch("os.remove")
mock_os_lexists = mocker.patch("os.path.lexists", return_value=True)
mock_open = mocker.patch("builtins.open", mocker.mock_open(read_data="dummy content"))
mock_open = mocker.patch(
"builtins.open", mocker.mock_open(read_data="dummy content")
)

output = "output"
library_id = "google-cloud-language"
Expand All @@ -1598,7 +1638,9 @@ def test_copy_readme_to_docs_source_not_exists(mocker):
mock_os_islink = mocker.patch("os.path.islink")
mock_os_remove = mocker.patch("os.remove")
mock_os_lexists = mocker.patch("os.path.lexists", return_value=False)
mock_open = mocker.patch("builtins.open", mocker.mock_open(read_data="dummy content"))
mock_open = mocker.patch(
"builtins.open", mocker.mock_open(read_data="dummy content")
)

output = "output"
library_id = "google-cloud-language"
Expand Down
Loading