wip #14825 (Closed)

6 changes: 3 additions & 3 deletions .generator/Dockerfile
@@ -73,8 +73,8 @@ RUN wget https://github.com/jgm/pandoc/releases/download/${PANDOC_VERSION}/pando
RUN tar -xvf pandoc-${PANDOC_VERSION}-linux-amd64.tar.gz -C pandoc-binary --strip-components=1

# Pin synthtool for a more hermetic build
RUN git clone https://github.com/googleapis/synthtool.git synthtool
RUN cd synthtool && git checkout 35313ccd8cdd2d12d2447ccdc497a7919aae1e3e
RUN git clone https://github.com/googleapis/synthtool.git synthtool6702a344265de050bceaff45d62358bb0023ba7d
RUN cd synthtool6702a344265de050bceaff45d62358bb0023ba7d && git checkout 6702a344265de050bceaff45d62358bb0023ba7d

# --- Final Stage ---
# This stage creates the lightweight final image, copying only the
@@ -102,7 +102,7 @@ COPY --from=builder protoc/bin /usr/local/bin
COPY --from=builder protoc/include /usr/local/include

COPY --from=builder pandoc-binary/bin /usr/local/bin
COPY --from=builder synthtool /synthtool
COPY --from=builder synthtool6702a344265de050bceaff45d62358bb0023ba7d /synthtool

# Copy all Python interpreters, their pip executables, and their standard libraries from the builder.
COPY --from=builder /usr/local/bin/python3.9 /usr/local/bin/
42 changes: 28 additions & 14 deletions .generator/cli.py
@@ -33,7 +33,7 @@

try:
import synthtool
from synthtool.languages import python_mono_repo
from synthtool.languages import python, python_mono_repo

SYNTHTOOL_INSTALLED = True
SYNTHTOOL_IMPORT_ERROR = None
@@ -316,7 +316,7 @@ def _get_library_id(request_data: Dict) -> str:
return library_id


def _run_post_processor(output: str, library_id: str):
def _run_post_processor(output: str, library_id: str, is_mono_repo: bool):
"""Runs the synthtool post-processor on the output directory.

Args:
@@ -329,7 +329,10 @@ def _run_post_processor(output: str, library_id: str):
path_to_library = f"packages/{library_id}"
logger.info("Running Python post-processor...")
if SYNTHTOOL_INSTALLED:
python_mono_repo.owlbot_main(path_to_library)
if is_mono_repo:
python_mono_repo.owlbot_main(path_to_library)
else:
python.owlbot_main()
else:
raise SYNTHTOOL_IMPORT_ERROR # pragma: NO COVER

@@ -342,7 +345,7 @@ def _run_post_processor(output: str, library_id: str):
logger.info("Python post-processor ran successfully.")


def _copy_files_needed_for_post_processing(output: str, input: str, library_id: str):
def _copy_files_needed_for_post_processing(output: str, input: str, library_id: str, is_mono_repo: bool):
"""Copy files to the output directory whcih are needed during the post processing
step, such as .repo-metadata.json and script/client-post-processing, using
the input directory as the source.
@@ -354,8 +357,7 @@ def _copy_files_needed_for_post_processing(output: str, input: str, library_id:
which contains additional generator input.
library_id(str): The library id to be used for post processing.
"""

path_to_library = f"packages/{library_id}"
path_to_library = f"packages/{library_id}" if is_mono_repo else "."
repo_metadata_path = f"{input}/{path_to_library}/.repo-metadata.json"

# We need to create these directories so that we can copy files necessary for post-processing.
@@ -373,6 +375,14 @@ def _copy_files_needed_for_post_processing(output: str, input: str, library_id:
f"{output}/{path_to_library}/.repo-metadata.json",
)

if not is_mono_repo:
setup_py_path = f"{input}/setup.py"
if os.path.exists(setup_py_path):
shutil.copy(
setup_py_path,
f"{output}/setup.py",
)

# copy post-processing files
for post_processing_file in glob.glob(
f"{input}/client-post-processing/*.yaml"
@@ -486,7 +496,7 @@ def _create_repo_metadata_from_service_config(


def _generate_repo_metadata_file(
output: str, library_id: str, source: str, apis: List[Dict]
output: str, library_id: str, source: str, apis: List[Dict], is_mono_repo: bool
):
"""Generates the .repo-metadata.json file from the primary API service config.

@@ -496,7 +506,7 @@ def _generate_repo_metadata_file(
source (str): The path to the source directory.
apis (List[Dict]): A list of APIs to generate.
"""
path_to_library = f"packages/{library_id}"
path_to_library = f"packages/{library_id}" if is_mono_repo else "."
output_repo_metadata = f"{output}/{path_to_library}/.repo-metadata.json"

# TODO(https://github.com/googleapis/librarian/issues/2334)): If `.repo-metadata.json`
Expand Down Expand Up @@ -593,6 +603,7 @@ def handle_generate(
"""

try:
is_mono_repo = _is_mono_repo(input)
# Read a generate-request.json file
request_data = _read_json_file(f"{librarian}/{GENERATE_REQUEST_FILE}")
library_id = _get_library_id(request_data)
@@ -601,10 +612,10 @@
for api in apis_to_generate:
api_path = api.get("path")
if api_path:
_generate_api(api_path, library_id, source, output, version)
_copy_files_needed_for_post_processing(output, input, library_id)
_generate_repo_metadata_file(output, library_id, source, apis_to_generate)
_run_post_processor(output, library_id)
_generate_api(api_path, library_id, source, output, version, is_mono_repo)
_copy_files_needed_for_post_processing(output, input, library_id, is_mono_repo)
_generate_repo_metadata_file(output, library_id, source, apis_to_generate, is_mono_repo)
_run_post_processor(output, library_id, is_mono_repo)
_copy_readme_to_docs(output, library_id)
_clean_up_files_after_post_processing(output, library_id)
except Exception as e:
@@ -821,7 +832,7 @@ def _stage_gapic_library(tmp_dir: str, staging_dir: str) -> None:


def _generate_api(
api_path: str, library_id: str, source: str, output: str, gapic_version: str
api_path: str, library_id: str, source: str, output: str, gapic_version: str, is_mono_repo: bool
):
"""
Handles the generation and staging process for a single API path.
@@ -855,8 +866,11 @@
api_path, is_proto_only_library
)
staging_dir = os.path.join(
output, "owl-bot-staging", library_id, staging_child_directory
output, "owl-bot-staging"
)
if is_mono_repo:
staging_dir = os.path.join(staging_dir, library_id)
staging_dir = os.path.join(staging_dir, staging_child_directory)

# 4. Stage the generated code
if is_proto_only_library:
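Note on the new is_mono_repo flag: handle_generate derives it from _is_mono_repo(input), but that helper's definition is not part of the hunks shown above. Below is a minimal sketch of what such a check could look like, assuming, purely for illustration, that a mono repo is recognized by a top-level packages/ directory in the generator input; this heuristic is hypothetical and not taken from the PR.

import os

def _is_mono_repo(input_dir: str) -> bool:
    """Hypothetical sketch: treat the generator input as a mono repo when it
    contains a top-level packages/ directory. The actual check used by this
    PR is not visible in the diff."""
    return os.path.isdir(os.path.join(input_dir, "packages"))

Downstream, the flag only switches the path layout and the post-processing entry point: mono-repo libraries stage under owl-bot-staging/<library_id>/<child> and run python_mono_repo.owlbot_main("packages/<library_id>"), while single-library repos stage under owl-bot-staging/<child> and run python.owlbot_main().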
3 changes: 3 additions & 0 deletions .generator/requirements.in
@@ -5,3 +5,6 @@ starlark-pyo3>=2025.1
build
black==23.7.0
isort==5.11.0
# Temporarily pin google-api-core until we can bump the runtime here to 3.11
# https://github.com/googleapis/google-cloud-python/blob/c0baa3211766aec17b25450bbbafff61cb9e236c/.generator/Dockerfile#L61-L62
google-api-core<2.28.0