diff --git a/.generator/Dockerfile b/.generator/Dockerfile
index 375aaabd9941..58e16889b94a 100644
--- a/.generator/Dockerfile
+++ b/.generator/Dockerfile
@@ -22,10 +22,16 @@ RUN apt-get update && \
     # Essential for compiling C code
    build-essential \
    # For downloading secure files
+    git \
    wget \
    ca-certificates \
    # For running bazelisk commands
    openjdk-17-jdk \
+    zip \
+    unzip \
+    # To avoid bazel error
+    # "python interpreter `python3` not found in PATH"
+    python3-dev \
    # --- Critical libraries for a complete Python build ---
    libssl-dev \
    zlib1g-dev \
@@ -67,10 +73,39 @@ RUN wget https://github.com/bazelbuild/bazelisk/releases/download/${BAZELISK_VER
 # Set the working directory for build-related tasks.
 WORKDIR /app
 
+# Create the group and user, but only if they don't already exist.
+ARG UID=1000
+ARG GID=1000
+
+RUN if ! getent group $GID > /dev/null; then \
+        groupadd -g $GID myuser; \
+    fi && \
+    if ! getent passwd $UID > /dev/null; then \
+        useradd -u $UID -g $GID -ms /bin/bash myuser; \
+    fi
+
+# Set ownership of the app directory now, before we copy files into it.
+RUN mkdir -p /app && chown $UID:$GID /app
+
+# Point both the Bazelisk and Bazel caches at the /bazel_cache directory,
+# which will be mounted as a volume.
+ENV BAZELISK_HOME="/bazel_cache/bazelisk"
+ENV BAZEL_HOME="/bazel_cache/bazel"
+
+# Ensure the cache directories within the non-root user's context exist and are writable.
+# This is crucial as Bazel creates subdirectories under BAZEL_HOME.
+RUN mkdir -p ${BAZEL_HOME}/_bazel_ubuntu/cache/repos \
+    ${BAZEL_HOME}/_bazel_ubuntu/output_base \
+    ${BAZELISK_HOME} && \
+    chown -R $UID:$GID ${BAZEL_HOME} ${BAZELISK_HOME}
+
+# NOTE: sourcing an activate script in its own RUN layer does not persist into
+# later layers, so the venv is only created here; the final stage puts it on PATH.
+RUN /usr/local/bin/python3.9 -m venv bazel_env
+
+RUN git clone https://github.com/googleapis/googleapis.git \
+    && cd googleapis \
+    && bazelisk --output_base=/bazel_cache/_bazel_ubuntu/output_base build --disk_cache=/bazel_cache/_bazel_ubuntu/cache/repos --incompatible_strict_action_env //google/cloud/language/v1:language-v1-py
+
 # TODO(https://github.com/googleapis/librarian/issues/904): Install protoc for gencode.
-# TODO(https://github.com/googleapis/librarian/issues/907): Install Python dependencies from requirements.in.
-# TODO(https://github.com/googleapis/librarian/issues/905): Install Synthtool by cloning its repo.
-# TODO(https://github.com/googleapis/librarian/issues/906): Clone googleapis and run bazelisk build.
 
 # --- Final Stage ---
 # This stage creates the lightweight final image, copying only the
@@ -82,21 +117,58 @@ FROM marketplace.gcr.io/google/ubuntu2404
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
    ca-certificates \
+    git \
    libssl3 \
    zlib1g \
    libbz2-1.0 \
    libffi8 \
    libsqlite3-0 \
    libreadline8 \
+    # For running bazelisk commands
+    openjdk-17-jdk \
+    # To avoid bazel error
+    # "python interpreter `python3` not found in PATH"
+    python3-dev \
+    # To avoid bazel error
+    # "Cannot find gcc or CC; either correct your path or set the CC environment variable"
+    build-essential \
+    # To avoid bazel error
+    # "unzip command not found"
+    unzip \
    && apt-get clean && \
    rm -rf /var/lib/apt/lists/*
 
-# TODO(https://github.com/googleapis/librarian/issues/902): Create a dedicate non-root user and
-# switch to the non-root user to run subsequent commands.
-# Example:
-# RUN groupadd --system --gid 1000 appgroup && \
-#     useradd --system --uid 1000 --gid appgroup appuser
-# USER appuser
+# Create the group and user, but only if they don't already exist.
+# NOTE: Pass the host UID/GID down as build args so that the user running the
+# librarian CLI has write permissions within the /app and /bazel_cache
+# directories, e.g.:
+# DOCKER_BUILDKIT=1 docker build -f .generator/Dockerfile --build-arg UID=$(id -u) --build-arg GID=$(id -g) -t python-librarian-generator:latest .
+ARG UID=1000
+ARG GID=1000
+
+RUN if ! getent group $GID > /dev/null; then \
+        groupadd -g $GID myuser; \
+    fi && \
+    if ! getent passwd $UID > /dev/null; then \
+        useradd -u $UID -g $GID -ms /bin/bash myuser; \
+    fi
+
+# Without a writable cache location, bazelisk fails at runtime with
+# "could not create directory /.cache/bazelisk: mkdir /.cache: permission denied",
+# which surfaces as a CalledProcessError from the CLI's bazelisk invocation.
+# Point both the Bazelisk and Bazel caches at the /bazel_cache directory,
+# which will be mounted as a volume.
+ENV BAZELISK_HOME="/bazel_cache/bazelisk"
+ENV BAZEL_HOME="/bazel_cache/bazel"
+RUN mkdir -p ${BAZEL_HOME}/_bazel_ubuntu/cache/repos \
+    ${BAZEL_HOME}/_bazel_ubuntu/output_base \
+    ${BAZELISK_HOME} && \
+    chown -R $UID:$GID ${BAZEL_HOME} ${BAZELISK_HOME}
+
+USER $UID
 
 # Copy all Python interpreters, their pip executables, and their standard libraries from the builder.
 COPY --from=builder /usr/local/bin/python3.9 /usr/local/bin/
@@ -114,11 +186,31 @@ COPY --from=builder /usr/local/lib/python3.12 /usr/local/lib/python3.12
 COPY --from=builder /usr/local/bin/python3.13 /usr/local/bin/
 COPY --from=builder /usr/local/lib/python3.13 /usr/local/lib/python3.13
 
+# Copy the bazelisk executable from the builder.
+COPY --from=builder /usr/local/bin/bazelisk /usr/local/bin/
+
+# Copy the bazel cache from the builder.
+COPY --from=builder --chown=$UID:$GID /bazel_cache /bazel_cache
+
 # Set the working directory in the container.
 WORKDIR /app
 
+# Create a virtual env and prepend it to PATH so that the post-processor can
+# find the nox module. NOTE: sourcing the activate script in a RUN layer would
+# not persist across layers, so PATH is set via ENV instead.
+RUN /usr/local/bin/python3.9 -m venv bazel_env
+
+ENV PATH=/app/bazel_env/bin:$PATH
+
+RUN git clone --depth 1 https://github.com/googleapis/synthtool.git /tmp/synthtool && \
+    bazel_env/bin/python3.9 -m pip install /tmp/synthtool nox && \
+    rm -rf /tmp/synthtool
+
 # Copy the CLI script into the container.
-COPY .generator/cli.py .
+COPY --chown=$UID:$GID .generator/cli.py .
+RUN chmod a+rx ./cli.py
 
 # Set the entrypoint for the container to run the script.
-ENTRYPOINT ["python3.11", "./cli.py"]
\ No newline at end of file
+# NOTE: The entrypoint is set to `/app/bazel_env/bin/python3.9` instead of
+# `python3.9` so that the post-processor picks up the installed nox module.
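+# Hypothetical local invocation (the mount paths below are illustrative
+# assumptions, not part of this change; adjust them to your layout):
+#   docker run --rm \
+#     -v "$(pwd)/bazel_cache:/bazel_cache" \
+#     -v "$(pwd)/googleapis:/source" \
+#     python-librarian-generator:latest generate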
+ENTRYPOINT ["/app/bazel_env/bin/python3.9", "./cli.py"] \ No newline at end of file diff --git a/.generator/cli.py b/.generator/cli.py index 417ef2d0a922..e9278c2e51e1 100644 --- a/.generator/cli.py +++ b/.generator/cli.py @@ -19,11 +19,15 @@ import subprocess import sys import subprocess +import shutil +import glob + from typing import Dict, List try: import synthtool from synthtool import gcp + from synthtool.languages import python_mono_repo SYNTHTOOL_INSTALLED = True SYNTHTOOL_IMPORT_ERROR = None @@ -35,10 +39,44 @@ LIBRARIAN_DIR = "librarian" GENERATE_REQUEST_FILE = "generate-request.json" -SOURCE_DIR = "source" +BUILD_REQUEST_FILE = "build-request.json" +SOURCE_DIR = "/source" OUTPUT_DIR = "output" +INPUT_DIR = "/input" REPO_DIR = "repo" +def _copy_files_needed_for_post_processing(output: str, input: str, library_id: str): + """Copy files to the output directory whcih are needed during the post processing + step, such as .repo-metadata.json and script/client-post-processing, using + the input directory as the source. + Args: + output(str): Path to the directory in the container where code + should be generated. + input(str): The path to the directory in the container + which contains additional generator input. + library_id(str): The library id to be used for post processing. + """ + + path_to_library = f"packages/{library_id}" + + # We need to create these directories so that we can copy files necessary for post-processing. + os.makedirs(f"{output}/{path_to_library}", exist_ok=True) + os.makedirs(f"{output}/{path_to_library}/scripts/client-post-processing", exist_ok=True) + print(f"{input}/{path_to_library}/.repo-metadata.json") + print(f"{output}/{path_to_library}/.repo-metadata.json") + + shutil.move( + f"{input}/{path_to_library}/.repo-metadata.json", + f"{output}/{path_to_library}/.repo-metadata.json", + ) + # copy post-procesing files + for post_processing_file in glob.glob(f"{input}/client-post-processing/*.yaml"): + with open(post_processing_file, "r") as post_processing: + if f"{path_to_library}/" in post_processing.read(): + shutil.move( + post_processing_file, + f"{output}/{path_to_library}/scripts/client-post-processing", + ) def _read_json_file(path: str) -> Dict: """Helper function that reads a json file path and returns the loaded json content. @@ -63,37 +101,54 @@ def handle_configure(): logger.info("'configure' command executed.") +import os +import re +import logging + +# Assume logger and SOURCE_DIR are defined +# logging.basicConfig(level=logging.INFO) +# logger = logging.getLogger(__name__) +# SOURCE_DIR = "." + def _determine_bazel_rule(api_path: str) -> str: - """Executes a `bazelisk query` to find a Bazel rule. + """Finds a Bazel rule by parsing the BUILD.bazel file directly. Args: - api_path (str): The API path to query for. + api_path (str): The API path, e.g., 'google/cloud/language/v1'. Returns: - str: The discovered Bazel rule. + str: The discovered Bazel rule, e.g., '//google/cloud/language/v1:language-v1-py'. Raises: - ValueError: If the subprocess call fails or returns an empty result. + ValueError: If the file can't be processed or no matching rule is found. 
""" - logger.info(f"Determining Bazel rule for api_path: '{api_path}'") + logger.info(f"Determining Bazel rule for api_path: '{api_path}' by parsing file.") try: - query = f'filter("-py$", kind("rule", //{api_path}/...:*))' - command = ["bazelisk", "query", query] - result = subprocess.run( - command, - cwd=f"{SOURCE_DIR}/googleapis", - capture_output=True, - text=True, - check=True, + build_file_path = os.path.join( + SOURCE_DIR, api_path, "BUILD.bazel" ) - bazel_rule = result.stdout.strip() - if not bazel_rule: - raise ValueError(f"Bazelisk query `{query}` returned an empty bazel rule.") + + with open(build_file_path, "r") as f: + content = f.read() + + match = re.search(r'name\s*=\s*"([^"]+-py)"', content) + + # This check is for a logical failure (no match), not a runtime exception. + # It's good to keep it for clear error messaging. + if not match: + raise ValueError( + f"No Bazel rule with a name ending in '-py' found in {build_file_path}" + ) + + rule_name = match.group(1) + bazel_rule = f"//{api_path}:{rule_name}" logger.info(f"Found Bazel rule: {bazel_rule}") return bazel_rule + except Exception as e: - raise ValueError(f"Bazelisk query `{query}` failed") from e + + raise ValueError(f"Failed to determine Bazel rule for '{api_path}' by parsing.") from e def _get_library_id(request_data: Dict) -> str: @@ -110,25 +165,26 @@ def _get_library_id(request_data: Dict) -> str: """ library_id = request_data.get("id") if not library_id: - raise ValueError("Request file is missing required 'id' field.") + raise ValueError(f"Request file is missing required 'id' field. {request_data}") return library_id -def _build_bazel_target(bazel_rule: str): +def _build_bazel_target(bazel_rule: str, source: str = SOURCE_DIR): """Executes `bazelisk build` on a given Bazel rule. Args: bazel_rule (str): The Bazel rule to build. + source (str): The path to the root of the Bazel workspace. Raises: ValueError: If the subprocess call fails. """ logger.info(f"Executing build for rule: {bazel_rule}") try: - command = ["bazelisk", "build", bazel_rule] + command = ["bazelisk", "--output_base=/bazel_cache/_bazel_ubuntu/output_base", "build", "--disk_cache=/bazel_cache/_bazel_ubuntu/cache/repos", "--incompatible_strict_action_env", bazel_rule] subprocess.run( command, - cwd=f"{SOURCE_DIR}/googleapis", + cwd=source, text=True, check=True, ) @@ -137,12 +193,21 @@ def _build_bazel_target(bazel_rule: str): raise ValueError(f"Bazel build for {bazel_rule} rule failed.") from e -def _locate_and_extract_artifact(bazel_rule: str, library_id: str): +def _locate_and_extract_artifact( + bazel_rule: str, + library_id: str, + output: str, + api_path: str, + source: str = SOURCE_DIR, +): """Finds and extracts the tarball artifact from a Bazel build. Args: bazel_rule (str): The Bazel rule that was built. library_id (str): The ID of the library being generated. + source (str): The path to the root of the Bazel workspace. + output (str): The path to the location where generated output + should be stored. Raises: ValueError: If failed to locate or extract artifact. @@ -150,10 +215,10 @@ def _locate_and_extract_artifact(bazel_rule: str, library_id: str): try: # 1. Find the bazel-bin output directory. 
logger.info("Locating Bazel output directory...") - info_command = ["bazelisk", "info", "bazel-bin"] + info_command = ["bazelisk", "--output_base=/bazel_cache/_bazel_ubuntu/output_base", "info", "bazel-bin"] result = subprocess.run( info_command, - cwd=f"{SOURCE_DIR}/googleapis", + cwd=source, text=True, check=True, capture_output=True, @@ -167,14 +232,15 @@ def _locate_and_extract_artifact(bazel_rule: str, library_id: str): logger.info(f"Found artifact at: {tarball_path}") # 3. Create a staging directory. - staging_dir = os.path.join(OUTPUT_DIR, "owl-bot-staging", library_id) + api_version = api_path.split("/")[-1] + staging_dir = os.path.join(output, "owl-bot-staging", library_id, api_version) os.makedirs(staging_dir, exist_ok=True) logger.info(f"Preparing staging directory: {staging_dir}") # 4. Extract the artifact. extract_command = ["tar", "-xvf", tarball_path, "--strip-components=1"] subprocess.run( - extract_command, cwd=staging_dir, capture_output=True, text=True, check=True + extract_command, cwd=staging_dir, text=True, check=True ) logger.info(f"Artifact {tarball_path} extracted successfully.") @@ -184,19 +250,27 @@ def _locate_and_extract_artifact(bazel_rule: str, library_id: str): ) from e -def _run_post_processor(): +def _run_post_processor(output_path: str, library_id: str): """Runs the synthtool post-processor on the output directory. + + Args: + output_path(str): path to the output directory """ logger.info("Running Python post-processor...") if SYNTHTOOL_INSTALLED: - command = ["python3", "-m", "synthtool.languages.python_mono_repo"] - subprocess.run(command, cwd=OUTPUT_DIR, text=True, check=True) + # command = ["python3.9", "-m", "synthtool.languages.python_mono_repo"] + os.chdir(output_path) + path_to_library = f"{output_path}/packages/{library_id}" + python_mono_repo.owlbot_main(path_to_library) + # subprocess.run(command, cwd=output_path, text=True, check=True, capture_output=False) else: raise SYNTHTOOL_IMPORT_ERROR logger.info("Python post-processor ran successfully.") -def handle_generate(): +def handle_generate( + librarian: str = LIBRARIAN_DIR, source: str = SOURCE_DIR, output: str = OUTPUT_DIR, input: str = INPUT_DIR +): """The main coordinator for the code generation process. 
 
     This function orchestrates the generation of a client library by reading a
@@ -209,16 +283,25 @@
 
     try:
         # Read a generate-request.json file
-        request_data = _read_json_file(f"{LIBRARIAN_DIR}/{GENERATE_REQUEST_FILE}")
+        request_data = _read_json_file(f"{librarian}/{GENERATE_REQUEST_FILE}")
         library_id = _get_library_id(request_data)
-
         for api in request_data.get("apis", []):
             api_path = api.get("path")
             if api_path:
                 bazel_rule = _determine_bazel_rule(api_path)
-                _build_bazel_target(bazel_rule)
-                _locate_and_extract_artifact(bazel_rule, library_id)
-                _run_post_processor()
+                _build_bazel_target(bazel_rule, source)
+                logger.info("Successfully built Bazel target.")
+                _locate_and_extract_artifact(bazel_rule, library_id, output, api_path, source)
+                logger.info("Successfully located and extracted the Bazel tarball.")
+
+        _copy_files_needed_for_post_processing(output, input, library_id)
+        _run_post_processor(output, library_id)
+        logger.info("Successfully ran the Python post-processor.")
+
+        # Write the `generate-response.json` using `generate-request.json` as the source.
+        with open(f"{librarian}/generate-response.json", "w") as f:
+            json.dump(request_data, f, indent=4)
+            f.write("\n")
 
     except Exception as e:
         raise ValueError("Generation failed.") from e
@@ -227,16 +310,17 @@
     logger.info("'generate' command executed.")
 
 
-def _run_nox_sessions(sessions: List[str]):
+def _run_nox_sessions(sessions: List[str], librarian_path: str = LIBRARIAN_DIR):
     """Calls nox for all specified sessions.
 
     Args:
-        path(List[str]): The list of nox sessions to run.
+        sessions (List[str]): The list of nox sessions to run.
+        librarian_path (str): The path to the librarian build configuration directory.
     """
-    # Read a generate-request.json file
+    # Read a build-request.json file
     current_session = None
     try:
-        request_data = _read_json_file(f"{LIBRARIAN_DIR}/{GENERATE_REQUEST_FILE}")
+        request_data = _read_json_file(f"{librarian_path}/{BUILD_REQUEST_FILE}")
         library_id = _get_library_id(request_data)
         for nox_session in sessions:
             _run_individual_session(nox_session, library_id)
@@ -263,7 +347,7 @@
     logger.info(result)
 
 
-def handle_build():
+def handle_build(librarian: str = LIBRARIAN_DIR):
     """The main coordinator for validating client library generation."""
     sessions = [
         "unit-3.9",
@@ -278,7 +362,7 @@
         "mypy",
         "check_lower_bounds",
     ]
-    _run_nox_sessions(sessions)
+    _run_nox_sessions(sessions, librarian)
     logger.info("'build' command executed.")
 
 
@@ -303,10 +387,43 @@
     ]:
         parser_cmd = subparsers.add_parser(command_name, help=help_text)
         parser_cmd.set_defaults(func=handler_map[command_name])
+        parser_cmd.add_argument(
+            "--librarian",
+            type=str,
+            help="Path to the directory in the container which contains the librarian configuration",
+            default=LIBRARIAN_DIR,
+        )
+        parser_cmd.add_argument(
+            "--input",
+            type=str,
+            help="Path to the directory in the container which contains additional generator input",
+            default=INPUT_DIR,
+        )
+        parser_cmd.add_argument(
+            "--output",
+            type=str,
+            help="Path to the directory in the container where code should be generated",
+            default=OUTPUT_DIR,
+        )
+        parser_cmd.add_argument(
+            "--source",
+            type=str,
+            help="Path to the directory in the container which contains API protos",
+            default=SOURCE_DIR,
+        )
 
     if len(sys.argv) == 1:
         parser.print_help(sys.stderr)
         sys.exit(1)
 
     args = parser.parse_args()
-    args.func()
+
+    # Pass specific arguments to the handler functions for generate/build.
+    if args.command == "generate":
+        args.func(
+            librarian=args.librarian,
+            source=args.source,
+            output=args.output,
+            input=args.input,
+        )
+    elif args.command == "build":
+        args.func(librarian=args.librarian)
+    else:
+        args.func()
\ No newline at end of file
diff --git a/.generator/requirements.in b/.generator/requirements.in
new file mode 100644
index 000000000000..e3621ce9af25
--- /dev/null
+++ b/.generator/requirements.in
@@ -0,0 +1,17 @@
+gapic-generator
+click
+google-api-core
+googleapis-common-protos
+jinja2
+MarkupSafe
+protobuf
+pypandoc
+PyYAML
+grpc-google-iam-v1
+proto-plus
+pytest-asyncio
+libcst
+inflection
+aiohttp
+black
+isort
\ No newline at end of file
diff --git a/.librarian/generator-input/packages/google-cloud-language/.repo-metadata.json b/.librarian/generator-input/packages/google-cloud-language/.repo-metadata.json
new file mode 100644
index 000000000000..78080dbce605
--- /dev/null
+++ b/.librarian/generator-input/packages/google-cloud-language/.repo-metadata.json
@@ -0,0 +1,18 @@
+{
+    "name": "language",
+    "name_pretty": "Natural Language",
+    "product_documentation": "https://cloud.google.com/natural-language/docs/",
+    "client_documentation": "https://cloud.google.com/python/docs/reference/language/latest",
+    "issue_tracker": "https://issuetracker.google.com/savedsearches/559753",
+    "release_level": "stable",
+    "language": "python",
+    "library_type": "GAPIC_AUTO",
+    "repo": "googleapis/google-cloud-python",
+    "distribution_name": "google-cloud-language",
+    "api_id": "language.googleapis.com",
+    "requires_billing": true,
+    "default_version": "v1",
+    "codeowner_team": "@googleapis/cdpe-cloudai",
+    "api_shortname": "language",
+    "api_description": "provides natural language understanding technologies to developers, including sentiment analysis, entity analysis, entity sentiment analysis, content classification, and syntax analysis. This API is part of the larger Cloud Machine Learning API family."
+}
diff --git a/.librarian/state.yaml b/.librarian/state.yaml
index 5b496e7d6f4d..641e19c8eda5 100644
--- a/.librarian/state.yaml
+++ b/.librarian/state.yaml
@@ -1,16 +1,23 @@
-image: google-cloud-python-generator:latest
+image: python-librarian-generator:latest
 libraries:
-- apis:
+- id: google-cloud-language
+  version: 2.17.2
+  last_generated_commit: 97a83d76a09a7f6dcab43675c87bdfeb5bcf1cb5
+  apis:
   - path: google/cloud/language/v1beta2
-    service_config: ''
   - path: google/cloud/language/v2
-    service_config: ''
   - path: google/cloud/language/v1
-    service_config: ''
-  id: google-cloud-language
-  last_generated_commit: 97a83d76a09a7f6dcab43675c87bdfeb5bcf1cb5
-  preserve_regex: ''
-  remove_regex: ''
-  sourcePaths:
-  - packages/google-cloud-language
-  version: 2.17.2
+  source_roots:
+  - ^packages/google-cloud-language
+  preserve_regex:
+  - .OwlBot.yaml
+  - CHANGELOG.md
+  - docs/CHANGELOG.md
+  - docs/README.rst
+  - samples/README.txt
+  - tar.gz
+  - gapic_version.py
+  - samples/generated_samples/snippet_metadata_
+  - scripts/client-post-processing
+  remove_regex:
+  - ^packages/google-cloud-language
\ No newline at end of file
diff --git a/cloudbuild.yaml b/cloudbuild.yaml
index 13903f79a9d4..8d992c68c548 100644
--- a/cloudbuild.yaml
+++ b/cloudbuild.yaml
@@ -16,28 +16,22 @@
 # Reduce this timeout by moving the installation of Python runtimes to a separate base image
 timeout: 7200s # 2 hours for the first uncached run, can be lowered later.
 steps:
-  # STEP 1: Pull the latest version of the image to use as a cache.
-  # The '|| exit 0' part ensures the build doesn't fail if the image
-  # doesn't exist yet (e.g., on the very first run).
-  - name: 'gcr.io/cloud-builders/docker'
-    entrypoint: 'bash'
-    args: ['-c', 'docker pull gcr.io/$PROJECT_ID/python-librarian-generator:latest || exit 0']
-
-  # STEP 2: Build the new image, using the pulled image as a cache source.
-  - name: 'gcr.io/cloud-builders/docker'
+  # A single step using the Kaniko executor to build and cache the image.
+  - name: 'gcr.io/kaniko-project/executor:latest'
     args:
-      - 'build'
-      - '--tag=gcr.io/$PROJECT_ID/python-librarian-generator:latest'
-      - '--cache-from=gcr.io/$PROJECT_ID/python-librarian-generator:latest'
-      - '--file=.generator/Dockerfile'
-      - '.'
+      # Specifies the Dockerfile path
+      - '--dockerfile=.generator/Dockerfile'
+      # Specifies the build context directory
+      - '--context=.'
+      # The final destination for the image
+      - '--destination=gcr.io/$PROJECT_ID/python-librarian-generator:latest'
+      # Enables Kaniko's remote registry caching
+      - '--cache=true'
+      # (Optional but recommended) Sets a time-to-live for cache layers
+      - '--cache-ttl=24h'
 
-# STEP 3: Push the newly built image to the registry so it can be used
-# as the cache for the next run. Cloud Build does this automatically if
-# the image is listed in the 'images' section.
-images:
-  - 'gcr.io/$PROJECT_ID/python-librarian-generator:latest'
+# The 'images' section is no longer needed because Kaniko pushes the image itself.
 
-# This section automatically creates a storage bucket for storing docker build logs.
 options:
-  default_logs_bucket_behavior: REGIONAL_USER_OWNED_BUCKET
\ No newline at end of file
+  default_logs_bucket_behavior: REGIONAL_USER_OWNED_BUCKET
+  machineType: E2_HIGHCPU_32
\ No newline at end of file