95 changes: 90 additions & 5 deletions .generator/cli.py
@@ -115,6 +115,7 @@ def _write_json_file(path: str, updated_content: Dict):
json.dump(updated_content, f, indent=2)
f.write("\n")


def _add_new_library_source_roots(library_config: Dict, library_id: str) -> None:
"""Adds the default source_roots to the library configuration if not present.

@@ -307,13 +308,22 @@ def _copy_files_needed_for_post_processing(output: str, input: str, library_id:
"""

path_to_library = f"packages/{library_id}"
repo_metadata_path = f"{input}/{path_to_library}/.repo-metadata.json"

# We need to create these directories so that we can copy files necessary for post-processing.
os.makedirs(f"{output}/{path_to_library}/scripts/client-post-processing", exist_ok=True)
shutil.copy(
f"{input}/{path_to_library}/.repo-metadata.json",
f"{output}/{path_to_library}/.repo-metadata.json",
os.makedirs(
f"{output}/{path_to_library}/scripts/client-post-processing", exist_ok=True
)
# TODO(https://github.com/googleapis/librarian/issues/2334):
# if `.repo-metadata.json` for a library exists in
# `.librarian/generator-input`, then we override the generated `.repo-metadata.json`
# with what we have in `generator-input`. Remove this logic once the
# generated `.repo-metadata.json` file is completely backfilled.
if os.path.exists(repo_metadata_path):
shutil.copy(
repo_metadata_path,
f"{output}/{path_to_library}/.repo-metadata.json",
)

# copy post-processing files
for post_processing_file in glob.glob(
@@ -360,6 +370,78 @@ def _clean_up_files_after_post_processing(output: str, library_id: str):
os.remove(gapic_version_file)


def _create_repo_metadata_from_service_config(
service_config_name: str, api_path: str, source: str, library_id: str
) -> Dict:
"""Creates the .repo-metadata.json content from the service config.

Args:
service_config_name (str): The name of the service config file.
api_path (str): The path to the API.
source (str): The path to the source directory.
library_id (str): The ID of the library.

Returns:
Dict: The content of the .repo-metadata.json file.
"""
full_service_config_path = f"{source}/{api_path}/{service_config_name}"

# TODO(https://github.com/googleapis/librarian/issues/2332): Read the api
# service config to backfill `.repo-metadata.json`.
return {
"api_shortname": "",
"name_pretty": "",
"product_documentation": "",
"api_description": "",
"client_documentation": "",
"issue_tracker": "",
"release_level": "",
"language": "python",
"library_type": "GAPIC_AUTO",
"repo": "googleapis/google-cloud-python",
"distribution_name": "",
"api_id": "",
}
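
An illustrative sketch (not part of this diff) of how the TODO above might backfill these fields once issue 2332 is addressed: read the service config YAML and map its fields onto the metadata. The helper name and the assumed schema fields (`name`, `title`, `documentation.summary`, and the `publishing` section) are assumptions here, not confirmed by this change.

import yaml  # assumed to be available in the generator environment

def _read_service_config_fields(full_service_config_path: str) -> Dict:
    """Extracts .repo-metadata.json fields from a service config (illustrative only)."""
    with open(full_service_config_path, "r") as f:
        service_config = yaml.safe_load(f) or {}
    publishing = service_config.get("publishing", {}) or {}
    documentation = service_config.get("documentation", {}) or {}
    return {
        # e.g. "language" from "language.googleapis.com"
        "api_shortname": service_config.get("name", "").split(".")[0],
        "name_pretty": service_config.get("title", ""),
        "api_description": documentation.get("summary", ""),
        "product_documentation": publishing.get("documentation_uri", ""),
        "issue_tracker": publishing.get("new_issue_uri", ""),
        "api_id": service_config.get("name", ""),
    }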


def _generate_repo_metadata_file(
output: str, library_id: str, source: str, apis: List[Dict]
):
"""Generates the .repo-metadata.json file from the primary API service config.

Args:
output (str): The path to the output directory.
library_id (str): The ID of the library.
source (str): The path to the source directory.
apis (List[Dict]): A list of APIs to generate.
"""
path_to_library = f"packages/{library_id}"
output_repo_metadata = f"{output}/{path_to_library}/.repo-metadata.json"

# TODO(https://github.com/googleapis/librarian/issues/2334): If `.repo-metadata.json`
# already exists in the `output` dir, then this means that it has been successfully copied
# over from the `input` dir and we can skip the logic here. Remove the following logic
# once we clean up all the `.repo-metadata.json` files from `.librarian/generator-input`.
if os.path.exists(output_repo_metadata):
return

os.makedirs(f"{output}/{path_to_library}", exist_ok=True)

# TODO(https://github.com/googleapis/librarian/issues/2333): Programmatically
# determine the primary api to be used to determine the information for
# metadata. For now, let's use the first api in the list.
primary_api = apis[0]

metadata_content = _create_repo_metadata_from_service_config(
primary_api.get("service_config"),
primary_api.get("path"),
source,
library_id,
)
_write_json_file(output_repo_metadata, metadata_content)
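
A hedged sketch (not part of this diff) of one way the TODO inside `_generate_repo_metadata_file` could pick the primary API instead of always taking the first list entry; matching the API path against the library id is an assumption about how the two relate, purely for illustration.

def _select_primary_api(apis: List[Dict], library_id: str) -> Dict:
    """Picks the API whose proto path matches the library id, else the first one (illustrative only)."""
    # e.g. "google-cloud-language" -> "google/cloud/language"
    expected_prefix = library_id.replace("-", "/")
    for api in apis:
        if api.get("path", "").startswith(expected_prefix):
            return api
    # Fall back to the current behaviour of using the first API in the list.
    return apis[0]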


def handle_generate(
librarian: str = LIBRARIAN_DIR,
source: str = SOURCE_DIR,
@@ -398,6 +480,7 @@ def handle_generate(
if api_path:
_generate_api(api_path, library_id, source, output)
_copy_files_needed_for_post_processing(output, input, library_id)
_generate_repo_metadata_file(output, library_id, source, apis_to_generate)
_run_post_processor(output, library_id)
_clean_up_files_after_post_processing(output, library_id)
except Exception as e:
@@ -948,7 +1031,9 @@ def _process_changelog(
entry_parts.append(f"\n\n### {change_type_map[adjusted_change_type]}\n")
for change in library_changes:
commit_link = f"([{change[source_commit_hash_key]}]({_REPO_URL}/commit/{change[source_commit_hash_key]}))"
entry_parts.append(f"* {change[subject_key]} {change[body_key]} {commit_link}")
entry_parts.append(
f"* {change[subject_key]} {change[body_key]} {commit_link}"
)

new_entry_text = "\n".join(entry_parts)
anchor_pattern = re.compile(
79 changes: 77 additions & 2 deletions .generator/test_cli.py
@@ -37,9 +37,11 @@
_clean_up_files_after_post_processing,
_copy_files_needed_for_post_processing,
_create_main_version_header,
_create_repo_metadata_from_service_config,
_determine_generator_command,
_determine_library_namespace,
_generate_api,
_generate_repo_metadata_file,
_get_api_generator_options,
_get_library_dist_name,
_get_library_id,
@@ -722,13 +724,28 @@ def test_invalid_json(mocker):
_read_json_file("fake/path.json")


def test_copy_files_needed_for_post_processing_success(mocker):
def test_copy_files_needed_for_post_processing_copies_metadata_if_exists(mocker):
"""Tests that .repo-metadata.json is copied if it exists."""
mock_makedirs = mocker.patch("os.makedirs")
mock_shutil_copy = mocker.patch("shutil.copy")
mocker.patch("os.path.exists", return_value=True)

_copy_files_needed_for_post_processing("output", "input", "library_id")

mock_makedirs.assert_called()
mock_shutil_copy.assert_called_once()
mock_makedirs.assert_called()


def test_copy_files_needed_for_post_processing_skips_metadata_if_not_exists(mocker):
"""Tests that .repo-metadata.json is not copied if it does not exist."""
mock_makedirs = mocker.patch("os.makedirs")
mock_shutil_copy = mocker.patch("shutil.copy")
mocker.patch("os.path.exists", return_value=False)

_copy_files_needed_for_post_processing("output", "input", "library_id")

mock_shutil_copy.assert_not_called()
mock_makedirs.assert_called()


def test_clean_up_files_after_post_processing_success(mocker):
@@ -1071,6 +1088,64 @@ def test_determine_library_namespace_success(
assert namespace == expected_namespace


def test_create_repo_metadata_from_service_config():
"""Tests the creation of .repo-metadata.json content."""
service_config_name = "service_config.yaml"
api_path = "google/cloud/language/v1"
source = "/source"
library_id = "google-cloud-language"

metadata = _create_repo_metadata_from_service_config(
service_config_name, api_path, source, library_id
)

assert metadata["language"] == "python"
assert metadata["library_type"] == "GAPIC_AUTO"
assert metadata["repo"] == "googleapis/google-cloud-python"


def test_generate_repo_metadata_file(mocker):
"""Tests the generation of the .repo-metadata.json file."""
mock_write_json = mocker.patch("cli._write_json_file")
mock_create_metadata = mocker.patch(
"cli._create_repo_metadata_from_service_config",
return_value={"repo": "googleapis/google-cloud-python"},
)
mocker.patch("os.makedirs")

output = "/output"
library_id = "google-cloud-language"
source = "/source"
apis = [
{
"service_config": "service_config.yaml",
"path": "google/cloud/language/v1",
}
]

_generate_repo_metadata_file(output, library_id, source, apis)

mock_create_metadata.assert_called_once_with(
"service_config.yaml", "google/cloud/language/v1", source, library_id
)
mock_write_json.assert_called_once_with(
f"{output}/packages/{library_id}/.repo-metadata.json",
{"repo": "googleapis/google-cloud-python"},
)


def test_generate_repo_metadata_file_skips_if_exists(mocker):
"""Tests that the generation of the .repo-metadata.json file is skipped if it already exists."""
mock_write_json = mocker.patch("cli._write_json_file")
mock_create_metadata = mocker.patch("cli._create_repo_metadata_from_service_config")
mocker.patch("os.path.exists", return_value=True)

_generate_repo_metadata_file("output", "library_id", "source", [])

mock_create_metadata.assert_not_called()
mock_write_json.assert_not_called()


def test_determine_library_namespace_fails_not_subpath():
"""Tests that a ValueError is raised if the gapic path is not inside the package root."""
pkg_root_path = Path("repo/packages/my-lib")