Skip to content

Commit b76f003

Browse files
committed
chore(librarian): add support for split repository
1 parent 6b4b00d commit b76f003

File tree

3 files changed

+77
-31
lines changed

3 files changed

+77
-31
lines changed

.generator/Dockerfile

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -69,8 +69,10 @@ RUN wget https://github.com/jgm/pandoc/releases/download/${PANDOC_VERSION}/pando
6969
RUN tar -xvf pandoc-${PANDOC_VERSION}-linux-amd64.tar.gz -C pandoc-binary --strip-components=1
7070

7171
# Pin synthtool for a more hermetic build
72-
RUN git clone https://github.com/googleapis/synthtool.git synthtool
73-
RUN cd synthtool && git checkout 35313ccd8cdd2d12d2447ccdc497a7919aae1e3e
72+
# This needs to be a single command so that the git clone command is not cached
73+
RUN git clone https://github.com/googleapis/synthtool.git synthtool && \
74+
cd synthtool && \
75+
git checkout 35313ccd8cdd2d12d2447ccdc497a7919aae1e3e
7476

7577
# --- Final Stage ---
7678
# This stage creates the lightweight final image, copying only the

.generator/cli.py

Lines changed: 60 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@
3333

3434
try:
3535
import synthtool
36-
from synthtool.languages import python_mono_repo
36+
from synthtool.languages import python, python_mono_repo
3737

3838
SYNTHTOOL_INSTALLED = True
3939
SYNTHTOOL_IMPORT_ERROR = None
@@ -316,20 +316,28 @@ def _get_library_id(request_data: Dict) -> str:
316316
return library_id
317317

318318

319-
def _run_post_processor(output: str, library_id: str):
319+
def _run_post_processor(output: str, library_id: str, is_mono_repo: bool):
320320
"""Runs the synthtool post-processor on the output directory.
321321
322322
Args:
323323
output(str): Path to the directory in the container where code
324324
should be generated.
325325
library_id(str): The library id to be used for post processing.
326-
326+
is_mono_repo(bool): True if the current repository is a mono-repo.
327327
"""
328328
os.chdir(output)
329329
path_to_library = f"packages/{library_id}"
330330
logger.info("Running Python post-processor...")
331331
if SYNTHTOOL_INSTALLED:
332-
python_mono_repo.owlbot_main(path_to_library)
332+
if is_mono_repo:
333+
python_mono_repo.owlbot_main(path_to_library)
334+
else:
335+
# Some repositories have customizations in `owlbot.py`. If this file exists,
336+
# run those customizations instead of `owlbot_main`
337+
if Path(f"{output}/owlbot.py").exists():
338+
subprocess.run(["python3.14", f"{output}/owlbot.py"])
339+
else:
340+
python.owlbot_main(path_to_library)
333341
else:
334342
raise SYNTHTOOL_IMPORT_ERROR # pragma: NO COVER
335343

@@ -342,7 +350,9 @@ def _run_post_processor(output: str, library_id: str):
342350
logger.info("Python post-processor ran successfully.")
343351

344352

345-
def _copy_files_needed_for_post_processing(output: str, input: str, library_id: str):
353+
def _copy_files_needed_for_post_processing(
354+
output: str, input: str, library_id: str, is_mono_repo: bool
355+
):
346356
"""Copy files to the output directory which are needed during the post processing
347357
step, such as .repo-metadata.json and script/client-post-processing, using
348358
the input directory as the source.
@@ -353,9 +363,9 @@ def _copy_files_needed_for_post_processing(output: str, input: str, library_id:
353363
input(str): The path to the directory in the container
354364
which contains additional generator input.
355365
library_id(str): The library id to be used for post processing.
366+
is_mono_repo(bool): True if the current repository is a mono-repo.
356367
"""
357-
358-
path_to_library = f"packages/{library_id}"
368+
path_to_library = f"packages/{library_id}" if is_mono_repo else "."
359369
repo_metadata_path = f"{input}/{path_to_library}/.repo-metadata.json"
360370

361371
# We need to create these directories so that we can copy files necessary for post-processing.
@@ -373,6 +383,20 @@ def _copy_files_needed_for_post_processing(output: str, input: str, library_id:
373383
f"{output}/{path_to_library}/.repo-metadata.json",
374384
)
375385

386+
if not is_mono_repo:
387+
setup_py_path = f"{input}/setup.py"
388+
if os.path.exists(setup_py_path):
389+
shutil.copy(
390+
setup_py_path,
391+
f"{output}/setup.py",
392+
)
393+
owlbot_py_path = f"{input}/owlbot.py"
394+
if os.path.exists(owlbot_py_path):
395+
shutil.copy(
396+
owlbot_py_path,
397+
f"{output}/owlbot.py",
398+
)
399+
376400
# copy post-processing files
377401
for post_processing_file in glob.glob(
378402
f"{input}/client-post-processing/*.yaml"
@@ -385,7 +409,7 @@ def _copy_files_needed_for_post_processing(output: str, input: str, library_id:
385409
)
386410

387411

388-
def _clean_up_files_after_post_processing(output: str, library_id: str):
412+
def _clean_up_files_after_post_processing(output: str, library_id: str, is_mono_repo: bool):
389413
"""
390414
Clean up files which should not be included in the generated client.
391415
This function is idempotent and will not fail if files are already removed.
@@ -394,8 +418,9 @@ def _clean_up_files_after_post_processing(output: str, library_id: str):
394418
output(str): Path to the directory in the container where code
395419
should be generated.
396420
library_id(str): The library id to be used for post processing.
421+
is_mono_repo(bool): True if the current repository is a mono-repo.
397422
"""
398-
path_to_library = f"packages/{library_id}"
423+
path_to_library = f"packages/{library_id}" if is_mono_repo else "."
399424

400425
# Safely remove directories, ignoring errors if they don't exist.
401426
shutil.rmtree(f"{output}/{path_to_library}/.nox", ignore_errors=True)
@@ -486,7 +511,7 @@ def _create_repo_metadata_from_service_config(
486511

487512

488513
def _generate_repo_metadata_file(
489-
output: str, library_id: str, source: str, apis: List[Dict]
514+
output: str, library_id: str, source: str, apis: List[Dict], is_mono_repo: bool
490515
):
491516
"""Generates the .repo-metadata.json file from the primary API service config.
492517
@@ -495,8 +520,9 @@ def _generate_repo_metadata_file(
495520
library_id (str): The ID of the library.
496521
source (str): The path to the source directory.
497522
apis (List[Dict]): A list of APIs to generate.
523+
is_mono_repo(bool): True if the current repository is a mono-repo.
498524
"""
499-
path_to_library = f"packages/{library_id}"
525+
path_to_library = f"packages/{library_id}" if is_mono_repo else "."
500526
output_repo_metadata = f"{output}/{path_to_library}/.repo-metadata.json"
501527

502528
# TODO(https://github.com/librarian/issues/2334): If `.repo-metadata.json`
@@ -523,7 +549,7 @@ def _generate_repo_metadata_file(
523549
_write_json_file(output_repo_metadata, metadata_content)
524550

525551

526-
def _copy_readme_to_docs(output: str, library_id: str):
552+
def _copy_readme_to_docs(output: str, library_id: str, is_mono_repo: bool):
527553
"""Copies the README.rst file for a generated library to docs/README.rst.
528554
529555
This function is robust against various symlink configurations that could
@@ -536,7 +562,7 @@ def _copy_readme_to_docs(output: str, library_id: str):
536562
should be generated.
537563
library_id(str): The library id.
538564
"""
539-
path_to_library = f"packages/{library_id}"
565+
path_to_library = f"packages/{library_id}" if is_mono_repo else "."
540566
source_path = f"{output}/{path_to_library}/README.rst"
541567
docs_path = f"{output}/{path_to_library}/docs"
542568
destination_path = f"{docs_path}/README.rst"
@@ -593,6 +619,7 @@ def handle_generate(
593619
"""
594620

595621
try:
622+
is_mono_repo = _is_mono_repo(input)
596623
# Read a generate-request.json file
597624
request_data = _read_json_file(f"{librarian}/{GENERATE_REQUEST_FILE}")
598625
library_id = _get_library_id(request_data)
@@ -601,12 +628,16 @@ def handle_generate(
601628
for api in apis_to_generate:
602629
api_path = api.get("path")
603630
if api_path:
604-
_generate_api(api_path, library_id, source, output, version)
605-
_copy_files_needed_for_post_processing(output, input, library_id)
606-
_generate_repo_metadata_file(output, library_id, source, apis_to_generate)
607-
_run_post_processor(output, library_id)
631+
_generate_api(
632+
api_path, library_id, source, output, version, is_mono_repo
633+
)
634+
_copy_files_needed_for_post_processing(output, input, library_id, is_mono_repo)
635+
_generate_repo_metadata_file(
636+
output, library_id, source, apis_to_generate, is_mono_repo
637+
)
638+
_run_post_processor(output, library_id, is_mono_repo)
608639
_copy_readme_to_docs(output, library_id)
609-
_clean_up_files_after_post_processing(output, library_id)
640+
_clean_up_files_after_post_processing(output, library_id, is_mono_repo)
610641
except Exception as e:
611642
raise ValueError("Generation failed.") from e
612643
logger.info("'generate' command executed.")
@@ -821,7 +852,12 @@ def _stage_gapic_library(tmp_dir: str, staging_dir: str) -> None:
821852

822853

823854
def _generate_api(
824-
api_path: str, library_id: str, source: str, output: str, gapic_version: str
855+
api_path: str,
856+
library_id: str,
857+
source: str,
858+
output: str,
859+
gapic_version: str,
860+
is_mono_repo: bool,
825861
):
826862
"""
827863
Handles the generation and staging process for a single API path.
@@ -833,6 +869,7 @@ def _generate_api(
833869
output (str): Path to the output directory where code should be staged.
834870
gapic_version(str): The desired version number for the GAPIC client library
835871
in a format which follows PEP-440.
872+
is_mono_repo(bool): True if the current repository is a mono-repo.
836873
"""
837874
py_gapic_config = _read_bazel_build_py_rule(api_path, source)
838875
is_proto_only_library = len(py_gapic_config) == 0
@@ -854,9 +891,10 @@ def _generate_api(
854891
staging_child_directory = _get_staging_child_directory(
855892
api_path, is_proto_only_library
856893
)
857-
staging_dir = os.path.join(
858-
output, "owl-bot-staging", library_id, staging_child_directory
859-
)
894+
staging_dir = os.path.join(output, "owl-bot-staging")
895+
if is_mono_repo:
896+
staging_dir = os.path.join(staging_dir, library_id)
897+
staging_dir = os.path.join(staging_dir, staging_child_directory)
860898

861899
# 4. Stage the generated code
862900
if is_proto_only_library:

.generator/test_cli.py

Lines changed: 13 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -659,6 +659,8 @@ def test_handle_generate_success(
659659
"""
660660
caplog.set_level(logging.INFO)
661661

662+
is_mono_repo = False
663+
662664
mock_generate_api = mocker.patch("cli._generate_api")
663665
mock_run_post_processor = mocker.patch("cli._run_post_processor")
664666
mock_copy_files_needed_for_post_processing = mocker.patch(
@@ -671,12 +673,12 @@ def test_handle_generate_success(
671673

672674
handle_generate()
673675

674-
mock_run_post_processor.assert_called_once_with("output", "google-cloud-language")
676+
mock_run_post_processor.assert_called_once_with("output", "google-cloud-language", is_mono_repo)
675677
mock_copy_files_needed_for_post_processing.assert_called_once_with(
676-
"output", "input", "google-cloud-language"
678+
"output", "input", "google-cloud-language", is_mono_repo
677679
)
678680
mock_clean_up_files_after_post_processing.assert_called_once_with(
679-
"output", "google-cloud-language"
681+
"output", "google-cloud-language", is_mono_repo
680682
)
681683
mock_generate_api.assert_called_once()
682684

@@ -836,8 +838,9 @@ def test_copy_files_needed_for_post_processing_copies_metadata_if_exists(mocker)
836838
mock_makedirs = mocker.patch("os.makedirs")
837839
mock_shutil_copy = mocker.patch("shutil.copy")
838840
mocker.patch("os.path.exists", return_value=True)
841+
is_mono_repo = True
839842

840-
_copy_files_needed_for_post_processing("output", "input", "library_id")
843+
_copy_files_needed_for_post_processing("output", "input", "library_id", is_mono_repo)
841844

842845
mock_shutil_copy.assert_called_once()
843846
mock_makedirs.assert_called()
@@ -848,8 +851,9 @@ def test_copy_files_needed_for_post_processing_skips_metadata_if_not_exists(mock
848851
mock_makedirs = mocker.patch("os.makedirs")
849852
mock_shutil_copy = mocker.patch("shutil.copy")
850853
mocker.patch("os.path.exists", return_value=False)
854+
is_mono_repo = True
851855

852-
_copy_files_needed_for_post_processing("output", "input", "library_id")
856+
_copy_files_needed_for_post_processing("output", "input", "library_id", is_mono_repo)
853857

854858
mock_shutil_copy.assert_not_called()
855859
mock_makedirs.assert_called()
@@ -1395,8 +1399,9 @@ def test_generate_repo_metadata_file(mocker):
13951399
"path": "google/cloud/language/v1",
13961400
}
13971401
]
1402+
is_mono_repo = True
13981403

1399-
_generate_repo_metadata_file(output, library_id, source, apis)
1404+
_generate_repo_metadata_file(output, library_id, source, apis, is_mono_repo)
14001405

14011406
mock_create_metadata.assert_called_once_with(
14021407
"service_config.yaml", "google/cloud/language/v1", source, library_id
@@ -1412,8 +1417,9 @@ def test_generate_repo_metadata_file_skips_if_exists(mocker):
14121417
mock_write_json = mocker.patch("cli._write_json_file")
14131418
mock_create_metadata = mocker.patch("cli._create_repo_metadata_from_service_config")
14141419
mocker.patch("os.path.exists", return_value=True)
1420+
is_mono_repo = True
14151421

1416-
_generate_repo_metadata_file("output", "library_id", "source", [])
1422+
_generate_repo_metadata_file("output", "library_id", "source", [], is_mono_repo)
14171423

14181424
mock_create_metadata.assert_not_called()
14191425
mock_write_json.assert_not_called()

0 commit comments

Comments
 (0)