1818import json
1919import logging
2020import os
21+ import parse_googleapis_content
2122import re
2223import shutil
2324import subprocess
2425import sys
2526import yaml
2627from datetime import datetime
28+ import tempfile
2729from pathlib import Path
2830from typing import Dict , List
2931
@@ -73,6 +75,7 @@ def _write_text_file(path: str, updated_content: str):
7375 updated_content(str): The contents to write to the file.
7476 """
7577
78+ os .makedirs (Path (path ).parent , exist_ok = True )
7679 with open (path , "w" ) as f :
7780 f .write (updated_content )
7881
@@ -113,46 +116,6 @@ def handle_configure():
113116 logger .info ("'configure' command executed." )
114117
115118
def _determine_bazel_rule(api_path: str, source: str) -> str:
    """Finds a Bazel rule by parsing the BUILD.bazel file directly.

    Args:
        api_path (str): The API path, e.g., 'google/cloud/language/v1'.
        source(str): The path to the root of the Bazel workspace.

    Returns:
        str: The discovered Bazel rule, e.g., '//google/cloud/language/v1:language-v1-py'.

    Raises:
        ValueError: If the file can't be processed or no matching rule is found.
    """
    logger.info(f"Determining Bazel rule for api_path: '{api_path}' by parsing file.")
    try:
        build_file_path = os.path.join(source, api_path, "BUILD.bazel")

        with open(build_file_path, "r") as build_file:
            build_contents = build_file.read()

        # The Python GAPIC target is the rule whose name ends in "-py".
        py_rule = re.search(r'name\s*=\s*"([^"]+-py)"', build_contents)

        # This check is for a logical failure (no match), not a runtime
        # exception. It's good to keep it for clear error messaging.
        if py_rule is None:  # pragma: NO COVER
            raise ValueError(
                f"No Bazel rule with a name ending in '-py' found in {build_file_path}"
            )

        bazel_rule = f"//{api_path}:{py_rule.group(1)}"
        logger.info(f"Found Bazel rule: {bazel_rule}")
        return bazel_rule
    except Exception as e:
        raise ValueError(
            f"Failed to determine Bazel rule for '{api_path}' by parsing."
        ) from e
155-
156119def _get_library_id (request_data : Dict ) -> str :
157120 """Retrieve the library id from the given request dictionary
158121
@@ -171,107 +134,6 @@ def _get_library_id(request_data: Dict) -> str:
171134 return library_id
172135
173136
def _build_bazel_target(bazel_rule: str, source: str):
    """Executes `bazelisk build` on a given Bazel rule.

    Args:
        bazel_rule(str): The Bazel rule to build.
        source(str): The path to the root of the Bazel workspace.

    Raises:
        ValueError: If the subprocess call fails.
    """
    logger.info(f"Executing build for rule: {bazel_rule}")
    # We're using the prewarmed bazel cache from the docker image to speed up
    # the bazelisk commands. Previously built artifacts are stored in
    # `/bazel_cache/_bazel_ubuntu/output_base` and will be used to speed up the
    # build. `disk_cache` is used as the 'remote cache' and is also prewarmed
    # as part of the docker image.
    # See https://bazel.build/remote/caching#disk-cache which explains using a
    # file system as a 'remote cache'.
    build_command = [
        "bazelisk",
        "--output_base=/bazel_cache/_bazel_ubuntu/output_base",
        "build",
        "--disk_cache=/bazel_cache/_bazel_ubuntu/cache/repos",
        "--incompatible_strict_action_env",
        bazel_rule,
    ]
    try:
        subprocess.run(build_command, cwd=source, text=True, check=True)
        logger.info(f"Bazel build for {bazel_rule} rule completed successfully.")
    except Exception as e:
        raise ValueError(f"Bazel build for {bazel_rule} rule failed.") from e
209-
def _locate_and_extract_artifact(
    bazel_rule: str,
    library_id: str,
    source: str,
    output: str,
    api_path: str,
):
    """Finds and extracts the tarball artifact from a Bazel build.

    Args:
        bazel_rule(str): The Bazel rule that was built.
        library_id(str): The ID of the library being generated.
        source(str): The path to the root of the Bazel workspace.
        output(str): The path to the location where generated output
            should be stored.
        api_path(str): The API path for the artifact

    Raises:
        ValueError: If failed to locate or extract artifact.
    """
    try:
        # 1. Ask Bazel where the bazel-bin output tree lives. Previously built
        # artifacts are stored in `/bazel_cache/_bazel_ubuntu/output_base`.
        # See `--output_base` in `_build_bazel_target`.
        logger.info("Locating Bazel output directory...")
        bazel_bin_path = subprocess.run(
            [
                "bazelisk",
                "--output_base=/bazel_cache/_bazel_ubuntu/output_base",
                "info",
                "bazel-bin",
            ],
            cwd=source,
            text=True,
            check=True,
            capture_output=True,
        ).stdout.strip()

        # 2. Derive the generated tarball's location from the rule label.
        rule_path, rule_name = bazel_rule.split(":")
        tarball_path = os.path.join(
            bazel_bin_path, rule_path.strip("/"), f"{rule_name}.tar.gz"
        )
        logger.info(f"Found artifact at: {tarball_path}")

        # 3. Create a staging directory keyed by library and API version.
        api_version = api_path.split("/")[-1]
        staging_dir = os.path.join(output, "owl-bot-staging", library_id, api_version)
        os.makedirs(staging_dir, exist_ok=True)
        logger.info(f"Preparing staging directory: {staging_dir}")

        # 4. Unpack the artifact, dropping its single top-level directory.
        subprocess.run(
            ["tar", "-xvf", tarball_path, "--strip-components=1"],
            cwd=staging_dir,
            capture_output=True,
            text=True,
            check=True,
        )
        logger.info(f"Artifact {tarball_path} extracted successfully.")

    except Exception as e:
        raise ValueError(
            f"Failed to locate or extract artifact for {bazel_rule} rule"
        ) from e
274-
275137def _run_post_processor (output : str , library_id : str ):
276138 """Runs the synthtool post-processor on the output directory.
277139
@@ -399,11 +261,49 @@ def handle_generate(
399261 for api in request_data .get ("apis" , []):
400262 api_path = api .get ("path" )
401263 if api_path :
402- bazel_rule = _determine_bazel_rule (api_path , source )
403- _build_bazel_target (bazel_rule , source )
404- _locate_and_extract_artifact (
405- bazel_rule , library_id , source , output , api_path
406- )
264+ generator_options = []
265+ with open (f"{ source } /{ api_path } /BUILD.bazel" , "r" ) as f :
266+ content = f .read ()
267+ result = parse_googleapis_content .parse_content (content )
268+ py_gapic_entry = [
269+ key for key in result .keys () if key .endswith ("_py_gapic" )
270+ ][0 ]
271+
272+ config_keys = [
273+ "grpc_service_config" ,
274+ "rest_numeric_enums" ,
275+ "service_yaml" ,
276+ "transport" ,
277+ ]
278+
279+ for key in config_keys :
280+ config_value = result [py_gapic_entry ].get (key , None )
281+ if config_value is not None :
282+ new_key = key .replace ("_" , "-" )
283+ if key == "grpc_service_config" :
284+ new_key = "retry-config"
285+ if new_key == "service-yaml" or new_key == "retry-config" :
286+ generator_options .append (
287+ f"{ new_key } ={ api_path } /{ config_value } ,"
288+ )
289+ else :
290+ generator_options .append (f"{ new_key } ={ config_value } ," )
291+ with tempfile .TemporaryDirectory () as tmp_dir :
292+ generator_command = (
293+ f"protoc { api_path } /*.proto --python_gapic_out={ tmp_dir } "
294+ )
295+ if len (generator_options ):
296+ generator_command += f" --python_gapic_opt=metadata,"
297+ for generator_option in generator_options :
298+ generator_command += generator_option
299+ subprocess .run ([generator_command ], cwd = source , shell = True )
300+ api_version = api_path .split ("/" )[- 1 ]
301+ staging_dir = os .path .join (
302+ output , "owl-bot-staging" , library_id , api_version
303+ )
304+ os .makedirs (staging_dir , exist_ok = True )
305+ logger .info (f"Preparing staging directory: { staging_dir } " )
306+ subprocess .run (f"cp -r { tmp_dir } /. { staging_dir } " , shell = True )
407307
408308 _copy_files_needed_for_post_processing (output , input , library_id )
409309 _run_post_processor (output , library_id )
0 commit comments