diff --git a/.github/workflows/copilotl.yml b/.github/workflows/copilotl.yml
new file mode 100644
index 0000000000..336938538e
--- /dev/null
+++ b/.github/workflows/copilotl.yml
@@ -0,0 +1,165 @@
+name: CI
+on:
+  push:
+    branches-ignore:
+      - 'generated'
+      - 'codegen/**'
+      - 'integrated/**'
+      - 'stl-preview-head/**'
+      - 'stl-preview-base/**'
+  pull_request:
+    branches-ignore:
+      - 'stl-preview-head/**'
+      - 'stl-preview-base/**'
+
+jobs:
+  lint:
+    timeout-minutes: 10
+    name: lint
+    runs-on: ${{ github.repository == 'stainless-sdks/openai-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
+    if: github.event_name == 'push' || github.event.pull_request.head.repo.fork
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rye
+        run: |
+          curl -sSf https://rye.astral.sh/get | bash
+          echo "$HOME/.rye/shims" >> $GITHUB_PATH
+        env:
+          RYE_VERSION: '0.44.0'
+          RYE_INSTALL_OPTION: '--yes'
+
+      - name: Install dependencies
+        run: rye sync --all-features
+
+      - name: Run lints
+        run: ./scripts/lint
+
+  build:
+    if: github.repository == 'stainless-sdks/openai-python' && (github.event_name == 'push' || github.event.pull_request.head.repo.fork)
+    timeout-minutes: 10
+    name: build
+    permissions:
+      contents: read
+      id-token: write
+    runs-on: depot-ubuntu-24.04
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rye
+        run: |
+          curl -sSf https://rye.astral.sh/get | bash
+          echo "$HOME/.rye/shims" >> $GITHUB_PATH
+        env:
+          RYE_VERSION: '0.44.0'
+          RYE_INSTALL_OPTION: '--yes'
+
+      - name: Install dependencies
+        run: rye sync --all-features
+
+      - name: Run build
+        run: rye build
+
+      - name: Get GitHub OIDC Token
+        id: github-oidc
+        uses: actions/github-script@v6
+        with:
+          script: core.setOutput('github_token', await core.getIDToken());
+
+      - name: Upload tarball
+        env:
+          URL: https://pkg.stainless.com/s
+          AUTH: ${{ steps.github-oidc.outputs.github_token }}
+          SHA: ${{ github.sha }}
+        run: ./scripts/utils/upload-artifact.sh
+
+  test:
+    timeout-minutes: 10
+    name: test
+    runs-on: ${{ github.repository == 'stainless-sdks/openai-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
+    if: github.event_name == 'push' || github.event.pull_request.head.repo.fork
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rye
+        run: |
+          curl -sSf https://rye.astral.sh/get | bash
+          echo "$HOME/.rye/shims" >> $GITHUB_PATH
+        env:
+          RYE_VERSION: '0.44.0'
+          RYE_INSTALL_OPTION: '--yes'
+
+      - name: Bootstrap
+        run: ./scripts/bootstrap
+
+      - name: Run tests
+        run: ./scripts/test
+
+  examples:
+    timeout-minutes: 10
+    name: examples
+    runs-on: ${{ github.repository == 'stainless-sdks/openai-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
+    if: github.repository == 'openai/openai-python' && (github.event_name == 'push' || github.event.pull_request.head.repo.fork)
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rye
+        run: |
+          curl -sSf https://rye.astral.sh/get | bash
+          echo "$HOME/.rye/shims" >> $GITHUB_PATH
+        env:
+          RYE_VERSION: '0.44.0'
+          RYE_INSTALL_OPTION: '--yes'
+      - name: Install dependencies
+        run: |
+          rye sync --all-features
+      - env:
+          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+        run: |
+          rye run python examples/async_demo.py
diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
new file mode 100644
index 0000000000..f076c65d87
--- /dev/null
+++ b/.github/workflows/docker-image.yml
@@ -0,0 +1,23 @@
+name: Docker Image CI
+
+on:
+  push:
+    branches: [ "main" ]
+  pull_request:
+    branches: [ "main" ]
+
+jobs:
+
+  build:
+
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v4
+    - name: Build the Docker image
+      run: docker build . --file Dockerfile --tag my-image-name:$(date +%s)
diff --git a/.github/workflows/openshift.yml b/.github/workflows/openshift.yml
new file mode 100644
index 0000000000..9b33ebf73c
--- /dev/null
+++ b/.github/workflows/openshift.yml
@@ -0,0 +1,203 @@
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+# 💁 The OpenShift Starter workflow will:
+# - Checkout your repository
+# - Perform a container image build
+# - Push the built image to the GitHub Container Registry (GHCR)
+# - Log in to your OpenShift cluster
+# - Create an OpenShift app from the image and expose it to the internet
+
+# ℹ️ Configure your repository and the workflow with the following steps:
+# 1. Have access to an OpenShift cluster. Refer to https://www.openshift.com/try
+# 2. Create the OPENSHIFT_SERVER and OPENSHIFT_TOKEN repository secrets. Refer to:
+#   - https://github.com/redhat-actions/oc-login#readme
+#   - https://docs.github.com/en/actions/reference/encrypted-secrets
+#   - https://cli.github.com/manual/gh_secret_set
+# 3. (Optional) Edit the top-level 'env' section as marked with '🖊️' if the defaults are not suitable for your project.
+# 4. (Optional) Edit the build-image step to build your project. +# The default build type is by using a Dockerfile at the root of the repository, +# but can be replaced with a different file, a source-to-image build, or a step-by-step buildah build. +# 5. Commit and push the workflow file to your default branch to trigger a workflow run. + +# 👋 Visit our GitHub organization at https://github.com/redhat-actions/ to see our actions and provide feedback. + +name: OpenShift + name: Setup Node.js environment + uses: actions/setup-node@v3.9.1 +env: + # 🖊️ EDIT your repository secrets to log into your OpenShift cluster and set up the context. + # See https://github.com/redhat-actions/oc-login#readme for how to retrieve these values. + # To get a permanent token, refer to https://github.com/redhat-actions/oc-login/wiki/Using-a-Service-Account-for-GitHub-Actions + OPENSHIFT_SERVER: ${{ secrets.OPENSHIFT_SERVER }} + OPENSHIFT_TOKEN: ${{ secrets.OPENSHIFT_TOKEN }} + # 🖊️ EDIT to set the kube context's namespace after login. Leave blank to use your user's default namespace. + OPENSHIFT_NAMESPACE: "" + + # 🖊️ EDIT to set a name for your OpenShift app, or a default one will be generated below. + APP_NAME: "" + + # 🖊️ EDIT with the port your application should be accessible on. + # If the container image exposes *exactly one* port, this can be left blank. + # Refer to the 'port' input of https://github.com/redhat-actions/oc-new-app + APP_PORT: "" + + # 🖊️ EDIT to change the image registry settings. + # Registries such as GHCR, Quay.io, and Docker Hub are supported. + IMAGE_REGISTRY: ghcr.io/${{ github.repository_owner }} + IMAGE_REGISTRY_USER: ${{ github.actor }} + IMAGE_REGISTRY_PASSWORD: ${{ github.token }} + + # 🖊️ EDIT to specify custom tags for the container image, or default tags will be generated below. + IMAGE_TAGS: "" + +on: + # https://docs.github.com/en/actions/reference/events-that-trigger-workflows + workflow_dispatch: + push: + # Edit to the branch(es) you want to build and deploy on each push. + branches: [ "main" ] + +jobs: + # 🖊️ EDIT if you want to run vulnerability check on your project before deploying + # the application. Please uncomment the below CRDA scan job and configure to run it in + # your workflow. For details about CRDA action visit https://github.com/redhat-actions/crda/blob/main/README.md + # + # TODO: Make sure to add 'CRDA Scan' starter workflow from the 'Actions' tab. 
+ # For guide on adding new starter workflow visit https://docs.github.com/en/github-ae@latest/actions/using-workflows/using-starter-workflows + + #crda-scan: + # uses: ./.github/workflows/crda.yml + # secrets: + # CRDA_KEY: ${{ secrets.CRDA_KEY }} + # # SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} # Either use SNYK_TOKEN or CRDA_KEY + + openshift-ci-cd: + # 🖊️ Uncomment this if you are using CRDA scan step above + # needs: crda-scan + name: Build and deploy to OpenShift + runs-on: ubuntu-latest + environment: production + + outputs: + ROUTE: ${{ steps.deploy-and-expose.outputs.route }} + SELECTOR: ${{ steps.deploy-and-expose.outputs.selector }} + + steps: + - name: Check for required secrets + uses: actions/github-script@v6 + with: + script: | + const secrets = { + OPENSHIFT_SERVER: `${{ secrets.OPENSHIFT_SERVER }}`, + OPENSHIFT_TOKEN: `${{ secrets.OPENSHIFT_TOKEN }}`, + }; + + const GHCR = "ghcr.io"; + if (`${{ env.IMAGE_REGISTRY }}`.startsWith(GHCR)) { + core.info(`Image registry is ${GHCR} - no registry password required`); + } + else { + core.info("A registry password is required"); + secrets["IMAGE_REGISTRY_PASSWORD"] = `${{ secrets.IMAGE_REGISTRY_PASSWORD }}`; + } + + const missingSecrets = Object.entries(secrets).filter(([ name, value ]) => { + if (value.length === 0) { + core.error(`Secret "${name}" is not set`); + return true; + } + core.info(`✔️ Secret "${name}" is set`); + return false; + }); + + if (missingSecrets.length > 0) { + core.setFailed(`❌ At least one required secret is not set in the repository. \n` + + "You can add it using:\n" + + "GitHub UI: https://docs.github.com/en/actions/reference/encrypted-secrets#creating-encrypted-secrets-for-a-repository \n" + + "GitHub CLI: https://cli.github.com/manual/gh_secret_set \n" + + "Also, refer to https://github.com/redhat-actions/oc-login#getting-started-with-the-action-or-see-example"); + } + else { + core.info(`✅ All the required secrets are set`); + } + + - name: Check out repository + uses: actions/checkout@v4 + + - name: Determine app name + if: env.APP_NAME == '' + run: | + echo "APP_NAME=$(basename $PWD)" | tee -a $GITHUB_ENV + + - name: Determine image tags + if: env.IMAGE_TAGS == '' + run: | + echo "IMAGE_TAGS=latest ${GITHUB_SHA::12}" | tee -a $GITHUB_ENV + + # https://github.com/redhat-actions/buildah-build#readme + - name: Build from Dockerfile + id: build-image + uses: redhat-actions/buildah-build@v2 + with: + image: ${{ env.APP_NAME }} + tags: ${{ env.IMAGE_TAGS }} + + # If you don't have a Dockerfile/Containerfile, refer to https://github.com/redhat-actions/buildah-build#scratch-build-inputs + # Or, perform a source-to-image build using https://github.com/redhat-actions/s2i-build + # Otherwise, point this to your Dockerfile/Containerfile relative to the repository root. 
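+        # A hypothetical illustration (not part of this workflow): the 'dockerfiles' input
+        # accepts any path relative to the repository root, so an alternate Containerfile
+        # could be selected instead, e.g.:
+        #
+        #   dockerfiles: |
+        #     ./containers/Containerfile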
+ dockerfiles: | + ./Dockerfile + + # https://github.com/redhat-actions/push-to-registry#readme + - name: Push to registry + id: push-image + uses: redhat-actions/push-to-registry@v2 + with: + image: ${{ steps.build-image.outputs.image }} + tags: ${{ steps.build-image.outputs.tags }} + registry: ${{ env.IMAGE_REGISTRY }} + username: ${{ env.IMAGE_REGISTRY_USER }} + password: ${{ env.IMAGE_REGISTRY_PASSWORD }} + + # The path the image was pushed to is now stored in ${{ steps.push-image.outputs.registry-path }} + + - name: Install oc + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: 4 + + # https://github.com/redhat-actions/oc-login#readme + - name: Log in to OpenShift + uses: redhat-actions/oc-login@v1 + with: + openshift_server_url: ${{ env.OPENSHIFT_SERVER }} + openshift_token: ${{ env.OPENSHIFT_TOKEN }} + insecure_skip_tls_verify: true + namespace: ${{ env.OPENSHIFT_NAMESPACE }} + + # This step should create a deployment, service, and route to run your app and expose it to the internet. + # https://github.com/redhat-actions/oc-new-app#readme + - name: Create and expose app + id: deploy-and-expose + uses: redhat-actions/oc-new-app@v1 + with: + app_name: ${{ env.APP_NAME }} + image: ${{ steps.push-image.outputs.registry-path }} + namespace: ${{ env.OPENSHIFT_NAMESPACE }} + port: ${{ env.APP_PORT }} + + - name: Print application URL + env: + ROUTE: ${{ steps.deploy-and-expose.outputs.route }} + SELECTOR: ${{ steps.deploy-and-expose.outputs.selector }} + run: | + [[ -n ${{ env.ROUTE }} ]] || (echo "Determining application route failed in previous step"; exit 1) + echo + echo "======================== Your application is available at: ========================" + echo ${{ env.ROUTE }} + echo "===================================================================================" + echo + echo "Your app can be taken down with: \"oc delete all --selector='${{ env.SELECTOR }}'\"" diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml index e078964a6f..86798b7e3a 100644 --- a/.github/workflows/release-doctor.yml +++ b/.github/workflows/release-doctor.yml @@ -1,5 +1,375 @@ name: Release Doctor on: +#!/usr/bin/env python3 + +# Buildtools and buildtools extended installer helper script +# +# Copyright (C) 2017-2020 Intel Corporation +# +# SPDX-License-Identifier: GPL-2.0-only +# +# NOTE: --with-extended-buildtools is on by default +# +# Example usage (extended buildtools from milestone): +# (1) using --url and --filename +# $ install-buildtools \ +# --url http://downloads.yoctoproject.org/releases/yocto/milestones/yocto-3.1_M3/buildtools \ +# --filename x86_64-buildtools-extended-nativesdk-standalone-3.0+snapshot-20200315.sh +# (2) using --base-url, --release, --installer-version and --build-date +# $ install-buildtools \ +# --base-url http://downloads.yoctoproject.org/releases/yocto \ +# --release yocto-3.1_M3 \ +# --installer-version 3.0+snapshot +# --build-date 202000315 +# +# Example usage (standard buildtools from release): +# (3) using --url and --filename +# $ install-buildtools --without-extended-buildtools \ +# --url http://downloads.yoctoproject.org/releases/yocto/yocto-3.0.2/buildtools \ +# --filename x86_64-buildtools-nativesdk-standalone-3.0.2.sh +# (4) using --base-url, --release and --installer-version +# $ install-buildtools --without-extended-buildtools \ +# --base-url http://downloads.yoctoproject.org/releases/yocto \ +# --release yocto-3.0.2 \ +# --installer-version 3.0.2 +# + +import argparse +import logging +import os +import 
platform +import re +import shutil +import shlex +import stat +import subprocess +import sys +import tempfile +from urllib.parse import quote + +scripts_path = os.path.dirname(os.path.realpath(__file__)) +lib_path = scripts_path + '/lib' +sys.path = sys.path + [lib_path] +import scriptutils +import scriptpath + + +PROGNAME = 'install-buildtools' +logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout) + +DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools') +DEFAULT_BASE_URL = 'https://downloads.yoctoproject.org/releases/yocto' +DEFAULT_RELEASE = 'yocto-5.2.2' +DEFAULT_INSTALLER_VERSION = '5.2.2' +DEFAULT_BUILDDATE = '202110XX' + +# Python version sanity check +if not (sys.version_info.major == 3 and sys.version_info.minor >= 4): + logger.error("This script requires Python 3.4 or greater") + logger.error("You have Python %s.%s" % + (sys.version_info.major, sys.version_info.minor)) + sys.exit(1) + +# The following three functions are copied directly from +# bitbake/lib/bb/utils.py, in order to allow this script +# to run on versions of python earlier than what bitbake +# supports (e.g. less than Python 3.5 for YP 3.1 release) + +def _hasher(method, filename): + import mmap + + with open(filename, "rb") as f: + try: + with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm: + for chunk in iter(lambda: mm.read(8192), b''): + method.update(chunk) + except ValueError: + # You can't mmap() an empty file so silence this exception + pass + return method.hexdigest() + + +def md5_file(filename): + """ + Return the hex string representation of the MD5 checksum of filename. + """ + import hashlib + return _hasher(hashlib.md5(), filename) + +def sha256_file(filename): + """ + Return the hex string representation of the 256-bit SHA checksum of + filename. + """ + import hashlib + return _hasher(hashlib.sha256(), filename) + +def remove_quotes(var): + """ + If a variable starts and ends with double quotes, remove them. + Assumption: if a variable starts with double quotes, it must also + end with them. 
+ """ + if var[0] == '"': + var = var[1:-1] + return var + + +def main(): + global DEFAULT_INSTALL_DIR + global DEFAULT_BASE_URL + global DEFAULT_RELEASE + global DEFAULT_INSTALLER_VERSION + global DEFAULT_BUILDDATE + filename = "" + release = "" + buildtools_url = "" + install_dir = "" + arch = platform.machine() + + parser = argparse.ArgumentParser( + description="Buildtools installation helper", + add_help=False, + formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('-u', '--url', + help='URL from where to fetch buildtools SDK installer, not ' + 'including filename (optional)\n' + 'Requires --filename.', + action='store') + parser.add_argument('-f', '--filename', + help='filename for the buildtools SDK installer to be installed ' + '(optional)\nRequires --url', + action='store') + parser.add_argument('-d', '--directory', + default=DEFAULT_INSTALL_DIR, + help='directory where buildtools SDK will be installed (optional)', + action='store') + parser.add_argument('--downloads-directory', + help='use this directory for tarball/checksum downloads and do not erase them (default is a temporary directory which is deleted after unpacking and installing the buildtools)', + action='store') + parser.add_argument('-r', '--release', + default=DEFAULT_RELEASE, + help='Yocto Project release string for SDK which will be ' + 'installed (optional)', + action='store') + parser.add_argument('-V', '--installer-version', + default=DEFAULT_INSTALLER_VERSION, + help='version string for the SDK to be installed (optional)', + action='store') + parser.add_argument('-b', '--base-url', + default=DEFAULT_BASE_URL, + help='base URL from which to fetch SDK (optional)', action='store') + parser.add_argument('-t', '--build-date', + default=DEFAULT_BUILDDATE, + help='Build date of pre-release SDK (optional)', action='store') + group = parser.add_mutually_exclusive_group() + group.add_argument('--with-extended-buildtools', action='store_true', + dest='with_extended_buildtools', + default=True, + help='enable extended buildtools tarball (on by default)') + group.add_argument('--without-extended-buildtools', action='store_false', + dest='with_extended_buildtools', + help='disable extended buildtools (traditional buildtools tarball)') + group.add_argument('--make-only', action='store_true', + help='only install make tarball') + group = parser.add_mutually_exclusive_group() + group.add_argument('-c', '--check', help='enable checksum validation', + default=True, action='store_true') + group.add_argument('-n', '--no-check', help='disable checksum validation', + dest="check", action='store_false') + parser.add_argument('-D', '--debug', help='enable debug output', + action='store_true') + parser.add_argument('-q', '--quiet', help='print only errors', + action='store_true') + + parser.add_argument('-h', '--help', action='help', + default=argparse.SUPPRESS, + help='show this help message and exit') + + args = parser.parse_args() + + if args.make_only: + args.with_extended_buildtools = False + + if args.debug: + logger.setLevel(logging.DEBUG) + elif args.quiet: + logger.setLevel(logging.ERROR) + + if args.url and args.filename: + logger.debug("--url and --filename detected. 
Ignoring --base-url "
+                     "--release --installer-version arguments.")
+        filename = args.filename
+        buildtools_url = "%s/%s" % (args.url, filename)
+    else:
+        if args.base_url:
+            base_url = args.base_url
+        else:
+            base_url = DEFAULT_BASE_URL
+        if args.release:
+            # check if this is a pre-release "milestone" SDK
+            m = re.search(r"^(?P<distro>[a-zA-Z\-]+)(?P<version>[0-9.]+)(?P<milestone>_M[1-9])$",
+                          args.release)
+            logger.debug("milestone regex: %s" % m)
+            if m and m.group('milestone'):
+                logger.debug("release[distro]: %s" % m.group('distro'))
+                logger.debug("release[version]: %s" % m.group('version'))
+                logger.debug("release[milestone]: %s" % m.group('milestone'))
+                if not args.build_date:
+                    logger.error("Milestone installers require --build-date")
+                else:
+                    if args.make_only:
+                        filename = "%s-buildtools-make-nativesdk-standalone-%s-%s.sh" % (
+                            arch, args.installer_version, args.build_date)
+                    elif args.with_extended_buildtools:
+                        filename = "%s-buildtools-extended-nativesdk-standalone-%s-%s.sh" % (
+                            arch, args.installer_version, args.build_date)
+                    else:
+                        filename = "%s-buildtools-nativesdk-standalone-%s-%s.sh" % (
+                            arch, args.installer_version, args.build_date)
+                    safe_filename = quote(filename)
+                    buildtools_url = "%s/milestones/%s/buildtools/%s" % (base_url, args.release, safe_filename)
+            # regular release SDK
+            else:
+                if args.make_only:
+                    filename = "%s-buildtools-make-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
+                elif args.with_extended_buildtools:
+                    filename = "%s-buildtools-extended-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
+                else:
+                    filename = "%s-buildtools-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
+                safe_filename = quote(filename)
+                buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename)
+
+    sdk_dir = args.downloads_directory or tempfile.mkdtemp()
+    os.makedirs(sdk_dir, exist_ok=True)
+    try:
+        # Fetch installer
+        logger.info("Fetching buildtools installer")
+        tmpbuildtools = os.path.join(sdk_dir, filename)
+        with open(os.path.join(sdk_dir, 'buildtools_url'), 'w') as f:
+            f.write(buildtools_url)
+        ret = subprocess.call("wget -q -O %s %s" %
+                              (tmpbuildtools, buildtools_url), shell=True)
+        if ret != 0:
+            logger.error("Could not download file from %s" % buildtools_url)
+            return ret
+
+        # Verify checksum
+        if args.check:
+            logger.info("Fetching buildtools installer checksum")
+            checksum_type = "sha256sum"
+            checksum_url = "{}.{}".format(buildtools_url, checksum_type)
+            checksum_filename = "{}.{}".format(filename, checksum_type)
+            tmpbuildtools_checksum = os.path.join(sdk_dir, checksum_filename)
+            with open(os.path.join(sdk_dir, 'checksum_url'), 'w') as f:
+                f.write(checksum_url)
+            ret = subprocess.call("wget -q -O %s %s" %
+                                  (tmpbuildtools_checksum, checksum_url), shell=True)
+            if ret != 0:
+                logger.error("Could not download file from %s" % checksum_url)
+                return ret
+            regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$")
+            with open(tmpbuildtools_checksum, 'rb') as f:
+                original = f.read()
+                m = re.search(regex, original.decode("utf-8"))
+                logger.debug("checksum regex match: %s" % m)
+                logger.debug("checksum: %s" % m.group('checksum'))
+                logger.debug("path: %s" % m.group('path'))
+                logger.debug("filename: %s" % m.group('filename'))
+            if filename != m.group('filename'):
+                logger.error("Filename does not match name in checksum")
+                return 1
+            checksum = m.group('checksum')
+            checksum_value = sha256_file(tmpbuildtools)
+            if checksum == checksum_value:
+                logger.info("Checksum success")
+            else:
+                logger.error("Checksum %s expected. "
+                             "Actual checksum is %s." %
+                             (checksum, checksum_value))
+                return 1
+
+        # Make installer executable
+        logger.info("Making installer executable")
+        st = os.stat(tmpbuildtools)
+        os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC)
+        logger.debug(os.stat(tmpbuildtools))
+        if args.directory:
+            install_dir = os.path.abspath(args.directory)
+            ret = subprocess.call("%s -d %s -y" %
+                                  (tmpbuildtools, install_dir), shell=True)
+        else:
+            install_dir = "/opt/poky/%s" % args.installer_version
+            ret = subprocess.call("%s -y" % tmpbuildtools, shell=True)
+        if ret != 0:
+            logger.error("Could not run buildtools installer")
+            return ret
+
+        # Setup the environment
+        logger.info("Setting up the environment")
+        regex = re.compile(r'^(?P<export>export )?(?P<env_var>[A-Z_]+)=(?P<env_val>.+)$')
+        with open("%s/environment-setup-%s-pokysdk-linux" %
+                  (install_dir, arch), 'rb') as f:
+            for line in f:
+                match = regex.search(line.decode('utf-8'))
+                logger.debug("export regex: %s" % match)
+                if match:
+                    env_var = match.group('env_var')
+                    logger.debug("env_var: %s" % env_var)
+                    env_val = remove_quotes(match.group('env_val'))
+                    logger.debug("env_val: %s" % env_val)
+                    os.environ[env_var] = env_val
+
+        # Test installation
+        logger.info("Testing installation")
+        tool = ""
+        m = re.search("extended", tmpbuildtools)
+        logger.debug("extended regex: %s" % m)
+        if args.with_extended_buildtools and not m:
+            logger.info("Ignoring --with-extended-buildtools as filename "
+                        "does not contain 'extended'")
+        if args.make_only:
+            tool = 'make'
+        elif args.with_extended_buildtools and m:
+            tool = 'gcc'
+        else:
+            tool = 'tar'
+        logger.debug("install_dir: %s" % install_dir)
+        cmd = shlex.split("/usr/bin/which %s" % tool)
+        logger.debug("cmd: %s" % cmd)
+        logger.debug("tool: %s" % tool)
+        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+        output, errors = proc.communicate()
+        logger.debug("proc.args: %s" % proc.args)
+        logger.debug("proc.communicate(): output %s" % output)
+        logger.debug("proc.communicate(): errors %s" % errors)
+        which_tool = output.decode('utf-8')
+        logger.debug("which %s: %s" % (tool, which_tool))
+        ret = proc.returncode
+        if not which_tool.startswith(install_dir):
+            logger.error("Something went wrong: %s not found in %s" %
+                         (tool, install_dir))
+        if ret != 0:
+            logger.error("Something went wrong: installation failed")
+        else:
+            logger.info("Installation successful. Remember to source the "
+                        "environment setup script now and in any new session.")
+        return ret
+
+    finally:
+        # cleanup tmp directory
+        if not args.downloads_directory:
+            shutil.rmtree(sdk_dir)
+
+
+if __name__ == '__main__':
+    try:
+        ret = main()
+    except Exception:
+        ret = 1
+        import traceback
+
+        traceback.print_exc()
+    sys.exit(ret)
   push:
     branches:
       - main
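For reference, here is a minimal self-contained sketch of the checksum verification performed by the install-buildtools script above. The helper and file names are illustrative, not part of the patch: it parses a `sha256sum`-style line with the same named-group pattern and compares the recorded digest against a freshly computed SHA-256 of the downloaded installer. Unlike the script, it does not cross-check the filename recorded in the checksum file.

```python
import hashlib
import re


def sha256_file(path, chunk_size=8192):
    """Return the hex SHA-256 digest of *path*, reading it in chunks."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()


def verify_installer(installer_path, checksum_path):
    """Check *installer_path* against a sha256sum-style checksum file."""
    with open(checksum_path, encoding="utf-8") as f:
        line = f.readline().strip()
    # Same shape as the script's checksum regex: "<hex digest>  [path/]filename"
    m = re.match(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$", line)
    if not m:
        raise ValueError("unrecognised checksum line: %r" % line)
    return m.group("checksum") == sha256_file(installer_path)


# Hypothetical usage (file names are examples only):
# ok = verify_installer(
#     "x86_64-buildtools-extended-nativesdk-standalone-5.2.2.sh",
#     "x86_64-buildtools-extended-nativesdk-standalone-5.2.2.sh.sha256sum",
# )
```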