
Release v2.10.0 #78

Open · wants to merge 19 commits into base: v2.10.x

16 changes: 15 additions & 1 deletion .github/scripts/release_prep/create_branches.sh
@@ -18,7 +18,21 @@ git clone \
--branch=master \
https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${DSP_REPOSITORY_FULL} \
${DSP_DIR}
cd ${DSP_DIR}
pushd ${DSP_DIR}
git checkout -B ${MINOR_RELEASE_BRANCH}
git push origin ${MINOR_RELEASE_BRANCH}
echo "::notice:: Created DSP ${MINOR_RELEASE_BRANCH} branch"
popd

echo "Current branches in ${DSP_PIPELINES_REPOSITORY_FULL}"
DSP_PIPELINES_DIR=$(dirname ${WORKING_DIR})/ilab-on-ocp
git clone \
--depth=1 \
--branch=main \
https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${DSP_PIPELINES_REPOSITORY_FULL} \
${DSP_PIPELINES_DIR}
pushd ${DSP_PIPELINES_DIR}
git checkout -B ${MINOR_RELEASE_BRANCH}
git push origin ${MINOR_RELEASE_BRANCH}
echo "::notice:: Created DSP Pipelines ${MINOR_RELEASE_BRANCH} branch"
popd
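
For context, the branch-cut pattern this script now applies to both repositories can be sketched as a small helper. This is illustrative only; it assumes GH_USER_NAME, GH_TOKEN, MINOR_RELEASE_BRANCH, and the repository/directory variables are exported by the calling release_prep workflow, exactly as in the diff above.

#!/usr/bin/env bash
# Sketch: shallow-clone a repository and publish the minor release branch.
# All variables are assumed to come from the release_prep workflow environment.
set -euo pipefail

cut_release_branch() {
  local repo_full="$1" default_branch="$2" clone_dir="$3"
  git clone \
    --depth=1 \
    --branch="${default_branch}" \
    "https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${repo_full}" \
    "${clone_dir}"
  pushd "${clone_dir}"
  git checkout -B "${MINOR_RELEASE_BRANCH}"   # create or reset the local branch
  git push origin "${MINOR_RELEASE_BRANCH}"   # publish it to the org
  echo "::notice:: Created ${repo_full} ${MINOR_RELEASE_BRANCH} branch"
  popd
}

# Hypothetical usage mirroring the two clones above:
cut_release_branch "${DSP_REPOSITORY_FULL}" master "${DSP_DIR}"
cut_release_branch "${DSP_PIPELINES_REPOSITORY_FULL}" main "$(dirname "${WORKING_DIR}")/ilab-on-ocp"
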
4 changes: 3 additions & 1 deletion .github/scripts/release_prep/prereqs.sh
@@ -14,6 +14,7 @@ check_branch_exists(){

check_branch_exists ${DSPO_REPOSITORY_FULL} ${MINOR_RELEASE_BRANCH}
check_branch_exists ${DSP_REPOSITORY_FULL} ${MINOR_RELEASE_BRANCH}
check_branch_exists ${DSP_PIPELINES_REPOSITORY_FULL} ${MINOR_RELEASE_BRANCH}

echo "Ensure compatibility.yaml is upto date, and generate a new compatibility.md. Use [release-tools] to accomplish this"

@@ -27,7 +28,8 @@ git checkout -B ${BRANCH_NAME}
echo "Created branch: ${BRANCH_NAME}"
echo "Checking if compatibility.yaml contains ${TARGET_RELEASE} release...."

contains_rel=$(cat docs/release/compatibility.yaml | rel=${MINOR_RELEASE_WILDCARD} yq '[.[].dsp] | contains([env(rel)])')
# convert rel to string in env(rel) explicitly to avoid comparing str to yq float
contains_rel=$(cat docs/release/compatibility.yaml | rel=${MINOR_RELEASE_WILDCARD} yq '[.[].dsp] | contains([""+env(rel)])')

if [[ "$contains_rel" == "false" ]]; then

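
The added string coercion matters because yq's env() parses an unquoted, numeric-looking value such as 2.10 as a float, while the dsp entries in compatibility.yaml are strings, so contains() never matches. A rough illustration of the intent follows; the sample file and the expected results are assumptions for the example, not output from a real run.

# Illustration only: string-vs-float comparison in yq (mikefarah v4 syntax).
cat > /tmp/compat-example.yaml <<'EOF'
- dsp: "2.9"
- dsp: "2.10"
EOF

# env(rel) parses the value as a float, so comparing it against string entries fails:
cat /tmp/compat-example.yaml | rel=2.10 yq '[.[].dsp] | contains([env(rel)])'      # expected: false

# Coercing the value to a string restores the intended match:
cat /tmp/compat-example.yaml | rel=2.10 yq '[.[].dsp] | contains([""+env(rel)])'   # expected: true
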
2 changes: 1 addition & 1 deletion .github/workflows/build-prs-trigger.yaml
@@ -29,7 +29,7 @@ jobs:
echo ${{ github.event.pull_request.state }} >> ./pr/pr_state
echo ${{ github.event.pull_request.head.sha }} >> ./pr/head_sha
echo ${{ github.event.action }} >> ./pr/event_action
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: pr
path: pr/
26 changes: 13 additions & 13 deletions .github/workflows/build-prs.yml
@@ -25,24 +25,24 @@ jobs:
event_action: ${{ steps.vars.outputs.event_action }}
steps:
- name: 'Download artifact'
uses: actions/github-script@v3.1.0
uses: actions/github-script@v6
with:
script: |
var artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id}},
});
var matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "pr"
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
return artifact.name == "pr"
})[0];
var download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: 'zip',
let download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: 'zip',
});
var fs = require('fs');
let fs = require('fs');
fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data));
- run: unzip pr.zip
- shell: bash
38 changes: 38 additions & 0 deletions .github/workflows/build-tags.yml
@@ -13,6 +13,11 @@ on:
default: 'v1.0.x'
description: 'Source branch to build DSP from'
required: true
src_pipelines_branch:
type: string
default: 'v1.0.x'
description: 'Source branch to build DSP Pipelines Generic Image from'
required: true
target_tag:
type: string
default: 'vx.y.z'
@@ -28,6 +33,11 @@ on:
default: 'opendatahub-io/data-science-pipelines'
description: 'DSP org/repo'
required: true
pipelines_org_repo:
type: string
default: 'opendatahub-io/ilab-on-ocp'
description: 'DSP Pipelines org/repo'
required: true
overwrite_imgs:
type: string
default: 'true'
@@ -45,6 +55,11 @@ on:
default: 'v1.0.x'
description: 'Source branch to build DSP from'
required: true
src_pipelines_branch:
type: string
default: 'v1.0.x'
description: 'Source branch to build DSP Pipelines Generic Image from'
required: true
target_tag:
default: 'vx.y.z'
description: 'Target Image Tag'
@@ -57,6 +72,11 @@ on:
default: 'opendatahub-io/data-science-pipelines'
description: 'DSP org/repo'
required: true
pipelines_org_repo:
type: string
default: 'opendatahub-io/ilab-on-ocp'
description: 'DSP Pipelines org/repo'
required: true
overwrite_imgs:
type: string
default: 'false'
@@ -70,8 +90,10 @@ env:
IMAGE_REPO_SWF: ds-pipelines-scheduledworkflow
IMAGE_REPO_LAUNCHER: ds-pipelines-launcher
IMAGE_REPO_DRIVER: ds-pipelines-driver
IMAGE_REPO_PIPELINES_RUNTIME_GENERIC: ds-pipelines-runtime-generic
SOURCE_DSPO_BRANCH: ${{ inputs.src_dspo_branch }}
SOURCE_DSP_BRANCH: ${{ inputs.src_dsp_branch }}
SOURCE_DSP_PIPELINES_BRANCH: ${{ inputs.src_pipelines_branch }}
QUAY_ORG: ${{ inputs.quay_org }}
QUAY_ID: ${{ secrets.QUAY_ID }}
QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }}
@@ -189,3 +211,19 @@ jobs:
DOCKERFILE: backend/Dockerfile.launcher
GH_REPO: ${{ inputs.dsp_org_repo }}
OVERWRITE: ${{ env.OVERWRITE_IMAGES }}

RUNTIME-GENERIC-build:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/build
name: Build Image
env:
SOURCE_BRANCH: ${{ env.SOURCE_DSP_PIPELINES_BRANCH }}
with:
IMAGE_REPO: ${{ env.IMAGE_REPO_PIPELINES_RUNTIME_GENERIC }}
DOCKERFILE: Dockerfile
GH_REPO: ${{ inputs.pipelines_org_repo }}
OVERWRITE: ${{ env.OVERWRITE_IMAGES }}
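
For a manual run, the new inputs sit alongside the existing ones. A hedged example using the gh CLI follows, assuming the workflow also exposes a workflow_dispatch trigger as the duplicated input blocks suggest; the input names are taken from the workflow above, while the branch, tag, and quay_org values are placeholders.

# Hypothetical manual dispatch of build-tags.yml exercising the new
# src_pipelines_branch / pipelines_org_repo inputs.
gh workflow run build-tags.yml \
  --repo opendatahub-io/data-science-pipelines-operator \
  -f src_dspo_branch=v2.10.x \
  -f src_dsp_branch=v2.10.x \
  -f src_pipelines_branch=v2.10.x \
  -f target_tag=v2.10.0 \
  -f quay_org=opendatahub \
  -f dsp_org_repo=opendatahub-io/data-science-pipelines \
  -f pipelines_org_repo=opendatahub-io/ilab-on-ocp \
  -f overwrite_imgs=false
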
24 changes: 12 additions & 12 deletions .github/workflows/release_create.yaml
@@ -29,21 +29,21 @@ jobs:
uses: actions/[email protected]
with:
script: |
var artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id}},
});
var matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "pr"
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
return artifact.name == "pr"
})[0];
var download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: 'zip',
let download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: 'zip',
});
var fs = require('fs');
let fs = require('fs');
fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data));
- run: unzip pr.zip
- shell: bash
10 changes: 8 additions & 2 deletions .github/workflows/release_prep.yaml
@@ -41,6 +41,7 @@ env:
DSPO_REPOSITORY_FULL: ${{ inputs.gh_org }}/data-science-pipelines-operator
DSP_REPOSITORY: data-science-pipelines
DSP_REPOSITORY_FULL: ${{ inputs.gh_org }}/data-science-pipelines
DSP_PIPELINES_REPOSITORY_FULL: ${{ inputs.gh_org }}/ilab-on-ocp
PREVIOUS_RELEASE_TAG: ${{ inputs.previous_release_tag }}
OVERWRITE_IMAGES: ${{ inputs.overwrite_imgs }}
CONFIG_TEMPLATE: "./.github/scripts/release_prep/templates/config.yaml"
@@ -66,6 +67,7 @@ jobs:
MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }}
DSPO_REPOSITORY_FULL: ${{ env.DSPO_REPOSITORY_FULL }}
DSPO_REPOSITORY: ${{ env.DSPO_REPOSITORY }}
DSP_PIPELINES_REPOSITORY_FULL: ${{ env.DSP_PIPELINES_REPOSITORY_FULL }}
run: ./.github/scripts/release_prep/prereqs.sh

create_branches:
@@ -86,6 +88,7 @@
MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }}
DSPO_REPOSITORY_FULL: ${{ env.DSPO_REPOSITORY_FULL }}
DSP_REPOSITORY_FULL: ${{ env.DSP_REPOSITORY_FULL }}
DSP_PIPELINES_REPOSITORY_FULL: ${{ env.DSP_PIPELINES_REPOSITORY_FULL }}
WORKING_DIR: ${{ github.workspace }}
run: ./.github/scripts/release_prep/create_branches.sh

@@ -100,7 +103,8 @@
MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }}
MINOR_RELEASE_TAG: ${{ env.MINOR_RELEASE_TAG }}
QUAY_ORG: ${{ env.QUAY_ORG }}
DSPO_GH_ORG: ${{ env.DSP_REPOSITORY_FULL }}
DSP_GH_ORG_REPO: ${{ env.DSP_REPOSITORY_FULL }}
DSP_PIPELINES_GH_ORG_REPO: ${{ env.DSP_PIPELINES_REPOSITORY_FULL }}
OVERWRITE_IMAGES: ${{ env.OVERWRITE_IMAGES }}
steps:
- run: echo "Storing env vars for re-usable workflow."
@@ -113,9 +117,11 @@
with:
src_dspo_branch: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_BRANCH }}
src_dsp_branch: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_BRANCH }}
src_pipelines_branch: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_BRANCH }}
target_tag: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_TAG }}
quay_org: ${{ needs.get-env-vars.outputs.QUAY_ORG }}
dsp_org_repo: ${{ needs.get-env-vars.outputs.DSPO_GH_ORG }}
dsp_org_repo: ${{ needs.get-env-vars.outputs.DSP_GH_ORG_REPO }}
pipelines_org_repo: ${{ needs.get-env-vars.outputs.DSP_PIPELINES_GH_ORG_REPO }}
overwrite_imgs: ${{ needs.get-env-vars.outputs.OVERWRITE_IMAGES }}
secrets: inherit

2 changes: 1 addition & 1 deletion .github/workflows/release_trigger.yaml
@@ -22,7 +22,7 @@ jobs:
PR_STATE: ${{ github.event.pull_request.state }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
run: ./.github/scripts/release_trigger/upload-data.sh
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: pr
path: pr/
45 changes: 41 additions & 4 deletions api/v1/dspipeline_types.go
@@ -55,6 +55,28 @@ type DSPASpec struct {
*WorkflowController `json:"workflowController,omitempty"`
}

// +kubebuilder:validation:Pattern=`^(Managed|Removed)$`
type ManagedPipelineState string

type ManagedPipelineOptions struct {
// Set to one of the following values:
//
// - "Managed" : This pipeline is automatically imported.
// - "Removed" : This pipeline is not automatically imported. If previously set to "Managed", setting to "Removed" does not remove existing managed pipelines but does prevent future updates from being imported.
//
// +kubebuilder:validation:Enum=Managed;Removed
// +kubebuilder:default=Removed
// +kubebuilder:validation:Optional
State ManagedPipelineState `json:"state,omitempty"`
}

type ManagedPipelinesSpec struct {
// Configures whether to automatically import the InstructLab pipeline.
// You must enable the trainingoperator component to run the InstructLab pipeline.
// +kubebuilder:validation:Optional
InstructLab *ManagedPipelineOptions `json:"instructLab,omitempty"`
}

type APIServer struct {
// Enable DS Pipelines Operator management of DSP API Server. Setting Deploy to false disables operator reconciliation. Default: true
// +kubebuilder:default:=true
@@ -66,14 +88,29 @@ type APIServer struct {
// +kubebuilder:default:=true
// +kubebuilder:validation:Optional
EnableRoute bool `json:"enableOauth"`
// Include sample pipelines with the deployment of this DSP API Server. Default: true
// Include the Iris sample pipeline with the deployment of this DSP API Server. Default: true
// +kubebuilder:default:=false
// +kubebuilder:validation:Optional
EnableSamplePipeline bool `json:"enableSamplePipeline"`
ArgoLauncherImage string `json:"argoLauncherImage,omitempty"`
ArgoDriverImage string `json:"argoDriverImage,omitempty"`
EnableSamplePipeline bool `json:"enableSamplePipeline"`
// Launcher/Executor image used during pipeline execution.
ArgoLauncherImage string `json:"argoLauncherImage,omitempty"`
// Driver image used during pipeline execution.
ArgoDriverImage string `json:"argoDriverImage,omitempty"`
// Generic runtime image used for building managed pipelines during
// api server init, and for basic runtime operations.
RuntimeGenericImage string `json:"runtimeGenericImage,omitempty"`
// Toolbox image used for basic container spec runtime operations
// in managed pipelines.
ToolboxImage string `json:"toolboxImage,omitempty"`
// RhelAI image used for ilab tasks in managed pipelines.
RHELAIImage string `json:"rhelAIImage,omitempty"`
// Enable various managed pipelines on this DSP API server.
ManagedPipelines *ManagedPipelinesSpec `json:"managedPipelines,omitempty"`
// Specify custom Pod resource requirements for this component.
Resources *ResourceRequirements `json:"resources,omitempty"`
// Specify init container resource requirements. The init container
// is used to build managed-pipelines and store them in a shared volume.
InitResources *ResourceRequirements `json:"initResources,omitempty"`

// If the Object store/DB is behind a TLS secured connection that is
// unrecognized by the host OpenShift/K8s cluster, then you can
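
To show how the new APIServer fields are expected to surface in a DataSciencePipelinesApplication resource, a hedged sketch follows. The field names come from the JSON tags above; the apiVersion group is assumed, and the namespace, image references, and resource values are placeholders, with other spec fields omitted.

# Illustrative only: apply a DSPA that opts into the managed InstructLab
# pipeline and overrides the new runtime images. Values are placeholders.
cat <<'EOF' | kubectl apply -n example-namespace -f -
apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
kind: DataSciencePipelinesApplication
metadata:
  name: sample-dspa
spec:
  apiServer:
    enableSamplePipeline: false
    managedPipelines:
      instructLab:
        state: Managed          # "Removed" (the default) prevents future imports
    runtimeGenericImage: quay.io/example/ds-pipelines-runtime-generic:v2.10.0
    toolboxImage: quay.io/example/toolbox:latest
    rhelAIImage: quay.io/example/rhelai:latest
    initResources:              # init container that builds managed pipelines
      requests:
        cpu: "500m"
        memory: 256Mi
EOF
# Note: per the field comment above, the trainingoperator component must be
# enabled for the InstructLab pipeline to run.
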
45 changes: 45 additions & 0 deletions api/v1/zz_generated.deepcopy.go

Some generated files are not rendered by default.