From 61dc2332d33e006420a3a6b07cace84f9bbd834b Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Mon, 3 Feb 2025 12:57:15 -0500 Subject: [PATCH 01/14] update upload-artifact in gh ci Signed-off-by: Humair Khan --- .github/workflows/build-prs-trigger.yaml | 2 +- .github/workflows/build-prs.yml | 26 ++++++++++++------------ .github/workflows/release_create.yaml | 24 +++++++++++----------- .github/workflows/release_trigger.yaml | 2 +- 4 files changed, 27 insertions(+), 27 deletions(-) diff --git a/.github/workflows/build-prs-trigger.yaml b/.github/workflows/build-prs-trigger.yaml index f620062c1..ddea2b16c 100644 --- a/.github/workflows/build-prs-trigger.yaml +++ b/.github/workflows/build-prs-trigger.yaml @@ -29,7 +29,7 @@ jobs: echo ${{ github.event.pull_request.state }} >> ./pr/pr_state echo ${{ github.event.pull_request.head.sha }} >> ./pr/head_sha echo ${{ github.event.action }} >> ./pr/event_action - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: pr path: pr/ diff --git a/.github/workflows/build-prs.yml b/.github/workflows/build-prs.yml index d6d46be9b..dd40b646f 100644 --- a/.github/workflows/build-prs.yml +++ b/.github/workflows/build-prs.yml @@ -25,24 +25,24 @@ jobs: event_action: ${{ steps.vars.outputs.event_action }} steps: - name: 'Download artifact' - uses: actions/github-script@v3.1.0 + uses: actions/github-script@v6 with: script: | - var artifacts = await github.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: ${{github.event.workflow_run.id }}, + let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id}}, }); - var matchArtifact = artifacts.data.artifacts.filter((artifact) => { - return artifact.name == "pr" + let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { + return artifact.name == "pr" })[0]; - var download = await 
github.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: 'zip', + let download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', }); - var fs = require('fs'); + let fs = require('fs'); fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data)); - run: unzip pr.zip - shell: bash diff --git a/.github/workflows/release_create.yaml b/.github/workflows/release_create.yaml index 73cc2e21e..840826bac 100644 --- a/.github/workflows/release_create.yaml +++ b/.github/workflows/release_create.yaml @@ -29,21 +29,21 @@ jobs: - uses: actions/github-script@v3.1.0 + uses: actions/github-script@v6 with: script: | - var artifacts = await github.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: ${{github.event.workflow_run.id }}, + let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id}}, }); - var matchArtifact = artifacts.data.artifacts.filter((artifact) => { - return artifact.name == "pr" + let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { + return artifact.name == "pr" })[0]; - var download = await github.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: 'zip', + let download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', }); - var fs = require('fs'); + let fs = require('fs'); fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data)); - run: unzip pr.zip - shell: bash diff --git a/.github/workflows/release_trigger.yaml b/.github/workflows/release_trigger.yaml index 2e36f147f..37a4017c3 100644 --- 
a/.github/workflows/release_trigger.yaml +++ b/.github/workflows/release_trigger.yaml @@ -22,7 +22,7 @@ jobs: PR_STATE: ${{ github.event.pull_request.state }} PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} run: ./.github/scripts/release_trigger/upload-data.sh - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: pr path: pr/ From f6689a82623cb336a3d437aada3d9f645a0b1b4c Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Mon, 3 Feb 2025 12:26:14 -0500 Subject: [PATCH 02/14] support init pipeline build step * parameterize managed pipeline images in dspo configs * add runtime image and supporting ci * update compatibility doc for 2.10 Signed-off-by: Humair Khan --- .../scripts/release_prep/create_branches.sh | 16 +- .github/scripts/release_prep/prereqs.sh | 4 +- .github/workflows/build-tags.yml | 38 ++ .github/workflows/release_prep.yaml | 10 +- api/v1/dspipeline_types.go | 16 +- api/v1alpha1/dspipeline_types.go | 16 +- config/base/kustomization.yaml | 21 + config/base/params.env | 3 + config/configmaps/files/config.yaml | 3 + ...b.io_datasciencepipelinesapplications.yaml | 26 ++ .../apiserver/default/deployment.yaml.tmpl | 31 ++ config/manager/manager.yaml | 6 + controllers/config/defaults.go | 3 + controllers/dspipeline_params.go | 7 + .../testdata/declarative/case_0/config.yaml | 3 + .../created/apiserver_deployment.yaml | 31 ++ .../testdata/declarative/case_1/config.yaml | 3 + .../testdata/declarative/case_2/config.yaml | 4 +- .../created/apiserver_deployment.yaml | 31 ++ .../testdata/declarative/case_3/config.yaml | 3 + .../created/apiserver_deployment.yaml | 31 ++ .../testdata/declarative/case_4/config.yaml | 3 + .../created/apiserver_deployment.yaml | 31 ++ .../testdata/declarative/case_5/config.yaml | 3 + .../created/apiserver_deployment.yaml | 31 ++ .../testdata/declarative/case_6/config.yaml | 3 + .../created/apiserver_deployment.yaml | 31 ++ docs/release/compatibility.md | 15 +- docs/release/compatibility.yaml | 407 
+++++++++--------- scripts/release/params.py | 3 + 30 files changed, 617 insertions(+), 216 deletions(-) diff --git a/.github/scripts/release_prep/create_branches.sh b/.github/scripts/release_prep/create_branches.sh index 885b21084..b33716db2 100755 --- a/.github/scripts/release_prep/create_branches.sh +++ b/.github/scripts/release_prep/create_branches.sh @@ -18,7 +18,21 @@ git clone \ --branch=master \ https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${DSP_REPOSITORY_FULL} \ ${DSP_DIR} -cd ${DSP_DIR} +pushd ${DSP_DIR} git checkout -B ${MINOR_RELEASE_BRANCH} git push origin ${MINOR_RELEASE_BRANCH} echo "::notice:: Created DSP ${MINOR_RELEASE_BRANCH} branch" +popd + +echo "Current branches in ${DSP_PIPELINES_REPOSITORY_FULL}" +DSP_PIPELINES_DIR=$(dirname ${WORKING_DIR})/ilab-on-ocp +git clone \ + --depth=1 \ + --branch=main \ + https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${DSP_PIPELINES_REPOSITORY_FULL} \ + ${DSP_PIPELINES_DIR} +pushd ${DSP_PIPELINES_DIR} +git checkout -B ${MINOR_RELEASE_BRANCH} +git push origin ${MINOR_RELEASE_BRANCH} +echo "::notice:: Created DSP Pipelines ${MINOR_RELEASE_BRANCH} branch" +popd diff --git a/.github/scripts/release_prep/prereqs.sh b/.github/scripts/release_prep/prereqs.sh index 3100c0008..b72f602fa 100755 --- a/.github/scripts/release_prep/prereqs.sh +++ b/.github/scripts/release_prep/prereqs.sh @@ -14,6 +14,7 @@ check_branch_exists(){ check_branch_exists ${DSPO_REPOSITORY_FULL} ${MINOR_RELEASE_BRANCH} check_branch_exists ${DSP_REPOSITORY_FULL} ${MINOR_RELEASE_BRANCH} +check_branch_exists ${DSP_PIPELINES_REPOSITORY_FULL} ${MINOR_RELEASE_BRANCH} echo "Ensure compatibility.yaml is upto date, and generate a new compatibility.md. Use [release-tools] to accomplish this" @@ -27,7 +28,8 @@ git checkout -B ${BRANCH_NAME} echo "Created branch: ${BRANCH_NAME}" echo "Checking if compatibility.yaml contains ${TARGET_RELEASE} release...." 
-contains_rel=$(cat docs/release/compatibility.yaml | rel=${MINOR_RELEASE_WILDCARD} yq '[.[].dsp] | contains([env(rel)])') +# convert rel to string in env(rel) explicitly to avoid comparing str to yq float +contains_rel=$(cat docs/release/compatibility.yaml | rel=${MINOR_RELEASE_WILDCARD} yq '[.[].dsp] | contains([""+env(rel)])') if [[ "$contains_rel" == "false" ]]; then diff --git a/.github/workflows/build-tags.yml b/.github/workflows/build-tags.yml index 25ed79bc4..73cd4829d 100644 --- a/.github/workflows/build-tags.yml +++ b/.github/workflows/build-tags.yml @@ -13,6 +13,11 @@ on: default: 'v1.0.x' description: 'Source branch to build DSP from' required: true + src_pipelines_branch: + type: string + default: 'v1.0.x' + description: 'Source branch to build DSP Pipelines Generic Image from' + required: true target_tag: type: string default: 'vx.y.z' @@ -28,6 +33,11 @@ on: default: 'opendatahub-io/data-science-pipelines' description: 'DSP org/repo' required: true + pipelines_org_repo: + type: string + default: 'opendatahub-io/ilab-on-ocp' + description: 'DSP Pipelines org/repo' + required: true overwrite_imgs: type: string default: 'true' @@ -45,6 +55,11 @@ on: default: 'v1.0.x' description: 'Source branch to build DSP from' required: true + src_pipelines_branch: + type: string + default: 'v1.0.x' + description: 'Source branch to build DSP Pipelines Generic Image from' + required: true target_tag: default: 'vx.y.z' description: 'Target Image Tag' @@ -57,6 +72,11 @@ on: default: 'opendatahub-io/data-science-pipelines' description: 'DSP org/repo' required: true + pipelines_org_repo: + type: string + default: 'opendatahub-io/ilab-on-ocp' + description: 'DSP Pipelines org/repo' + required: true overwrite_imgs: type: string default: 'false' @@ -70,8 +90,10 @@ env: IMAGE_REPO_SWF: ds-pipelines-scheduledworkflow IMAGE_REPO_LAUNCHER: ds-pipelines-launcher IMAGE_REPO_DRIVER: ds-pipelines-driver + IMAGE_REPO_PIELINES_RUNTIME_GENERIC: ds-pipelines-runtime-generic 
SOURCE_DSPO_BRANCH: ${{ inputs.src_dspo_branch }} SOURCE_DSP_BRANCH: ${{ inputs.src_dsp_branch }} + SOURCE_DSP_PIPELINES_BRANCH: ${{ inputs.src_pipelines_branch }} QUAY_ORG: ${{ inputs.quay_org }} QUAY_ID: ${{ secrets.QUAY_ID }} QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }} @@ -189,3 +211,19 @@ jobs: DOCKERFILE: backend/Dockerfile.launcher GH_REPO: ${{ inputs.dsp_org_repo }} OVERWRITE: ${{ env.OVERWRITE_IMAGES }} + + RUNTIME-GENERIC-build: + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - uses: actions/checkout@v3 + - uses: ./.github/actions/build + name: Build Image + env: + SOURCE_BRANCH: ${{ env.SOURCE_DSP_PIPELINES_BRANCH }} + with: + IMAGE_REPO: ${{ env.IMAGE_REPO_PIELINES_RUNTIME_GENERIC }} + DOCKERFILE: Dockerfile + GH_REPO: ${{ inputs.pipelines_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} diff --git a/.github/workflows/release_prep.yaml b/.github/workflows/release_prep.yaml index 1015c62c5..7976f95e7 100644 --- a/.github/workflows/release_prep.yaml +++ b/.github/workflows/release_prep.yaml @@ -41,6 +41,7 @@ env: DSPO_REPOSITORY_FULL: ${{ inputs.gh_org }}/data-science-pipelines-operator DSP_REPOSITORY: data-science-pipelines DSP_REPOSITORY_FULL: ${{ inputs.gh_org }}/data-science-pipelines + DSP_PIPELINES_REPOSITORY_FULL: ${{ inputs.gh_org }}/ilab-on-ocp PREVIOUS_RELEASE_TAG: ${{ inputs.previous_release_tag }} OVERWRITE_IMAGES: ${{ inputs.overwrite_imgs }} CONFIG_TEMPLATE: "./.github/scripts/release_prep/templates/config.yaml" @@ -66,6 +67,7 @@ jobs: MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} DSPO_REPOSITORY_FULL: ${{ env.DSPO_REPOSITORY_FULL }} DSPO_REPOSITORY: ${{ env.DSPO_REPOSITORY }} + DSP_PIPELINES_REPOSITORY_FULL: ${{ env.DSP_PIPELINES_REPOSITORY_FULL }} run: ./.github/scripts/release_prep/prereqs.sh create_branches: @@ -86,6 +88,7 @@ jobs: MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} DSPO_REPOSITORY_FULL: ${{ env.DSPO_REPOSITORY_FULL }} DSP_REPOSITORY_FULL: ${{ env.DSP_REPOSITORY_FULL }} + 
DSP_PIPELINES_REPOSITORY_FULL: ${{ env.DSP_PIPELINES_REPOSITORY_FULL }} WORKING_DIR: ${{ github.workspace }} run: ./.github/scripts/release_prep/create_branches.sh @@ -100,7 +103,8 @@ jobs: MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} MINOR_RELEASE_TAG: ${{ env.MINOR_RELEASE_TAG }} QUAY_ORG: ${{ env.QUAY_ORG }} - DSPO_GH_ORG: ${{ env.DSP_REPOSITORY_FULL }} + DSP_GH_ORG_REPO: ${{ env.DSP_REPOSITORY_FULL }} + DSP_PIPELINES_GH_ORG_REPO: ${{ env.DSP_PIPELINES_REPOSITORY_FULL }} OVERWRITE_IMAGES: ${{ env.OVERWRITE_IMAGES }} steps: - run: echo "Storing env vars for re-usable workflow." @@ -113,9 +117,11 @@ jobs: with: src_dspo_branch: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_BRANCH }} src_dsp_branch: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_BRANCH }} + src_pipelines_branch: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_BRANCH }} target_tag: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_TAG }} quay_org: ${{ needs.get-env-vars.outputs.QUAY_ORG }} - dsp_org_repo: ${{ needs.get-env-vars.outputs.DSPO_GH_ORG }} + dsp_org_repo: ${{ needs.get-env-vars.outputs.DSP_GH_ORG_REPO }} + pipelines_org_repo: ${{ needs.get-env-vars.outputs.DSP_PIPELINES_GH_ORG_REPO }} overwrite_imgs: ${{ needs.get-env-vars.outputs.OVERWRITE_IMAGES }} secrets: inherit diff --git a/api/v1/dspipeline_types.go b/api/v1/dspipeline_types.go index 8f6253318..adc383720 100644 --- a/api/v1/dspipeline_types.go +++ b/api/v1/dspipeline_types.go @@ -69,9 +69,19 @@ type APIServer struct { // Include sample pipelines with the deployment of this DSP API Server. Default: true // +kubebuilder:default:=false // +kubebuilder:validation:Optional - EnableSamplePipeline bool `json:"enableSamplePipeline"` - ArgoLauncherImage string `json:"argoLauncherImage,omitempty"` - ArgoDriverImage string `json:"argoDriverImage,omitempty"` + EnableSamplePipeline bool `json:"enableSamplePipeline"` + // Launcher/Executor image used during pipeline execution. 
+ ArgoLauncherImage string `json:"argoLauncherImage,omitempty"` + // Driver image used during pipeline execution. + ArgoDriverImage string `json:"argoDriverImage,omitempty"` + // Generic runtime image used for building managed pipelines during + // api server init, and for basic runtime operations. + RuntimeGenericImage string `json:"runtimeGenericImage,omitempty"` + // Toolbox image used for basic container spec runtime operations + // in managed pipelines. + ToolboxImage string `json:"toolboxImage,omitempty"` + // RhelAI image used for ilab tasks in managed pipelines. + RHELAIImage string `json:"rhelAIImage,omitempty"` // Specify custom Pod resource requirements for this component. Resources *ResourceRequirements `json:"resources,omitempty"` diff --git a/api/v1alpha1/dspipeline_types.go b/api/v1alpha1/dspipeline_types.go index 6a01914ac..ff07fc70c 100644 --- a/api/v1alpha1/dspipeline_types.go +++ b/api/v1alpha1/dspipeline_types.go @@ -69,9 +69,19 @@ type APIServer struct { // Include sample pipelines with the deployment of this DSP API Server. Default: true // +kubebuilder:default:=false // +kubebuilder:validation:Optional - EnableSamplePipeline bool `json:"enableSamplePipeline"` - ArgoLauncherImage string `json:"argoLauncherImage,omitempty"` - ArgoDriverImage string `json:"argoDriverImage,omitempty"` + EnableSamplePipeline bool `json:"enableSamplePipeline"` + // Launcher/Executor image used during pipeline execution. + ArgoLauncherImage string `json:"argoLauncherImage,omitempty"` + // Driver image used during pipeline execution. + ArgoDriverImage string `json:"argoDriverImage,omitempty"` + // Generic runtime image used for building managed pipelines during + // api server init, and for basic runtime operations. + RuntimeGenericImage string `json:"runtimeGenericImage,omitempty"` + // Toolbox image used for basic container spec runtime operations + // in managed pipelines. 
+ ToolboxImage string `json:"toolboxImage,omitempty"` + // RhelAI image used for ilab tasks in managed pipelines. + RHELAIImage string `json:"rhelAIImage,omitempty"` // Specify custom Pod resource requirements for this component. Resources *ResourceRequirements `json:"resources,omitempty"` diff --git a/config/base/kustomization.yaml b/config/base/kustomization.yaml index 4cfe5112b..2728a140f 100644 --- a/config/base/kustomization.yaml +++ b/config/base/kustomization.yaml @@ -102,6 +102,27 @@ vars: apiVersion: v1 fieldref: fieldpath: data.IMAGES_ARGO_WORKFLOWCONTROLLER + - name: IMAGES_PIPELINESRUNTIMEGENERIC + objref: + kind: ConfigMap + name: dspo-parameters + apiVersion: v1 + fieldref: + fieldpath: data.IMAGES_PIPELINESRUNTIMEGENERIC + - name: IMAGES_TOOLBOX + objref: + kind: ConfigMap + name: dspo-parameters + apiVersion: v1 + fieldref: + fieldpath: data.IMAGES_TOOLBOX + - name: IMAGES_RHELAI + objref: + kind: ConfigMap + name: dspo-parameters + apiVersion: v1 + fieldref: + fieldpath: data.IMAGES_RHELAI # DSPO level configs - name: ZAP_LOG_LEVEL diff --git a/config/base/params.env b/config/base/params.env index e0c44b4e9..f7475f249 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -7,6 +7,9 @@ IMAGES_ARGO_WORKFLOWCONTROLLER=quay.io/opendatahub/ds-pipelines-argo-workflowcon IMAGES_LAUNCHER=quay.io/opendatahub/ds-pipelines-launcher:latest IMAGES_DRIVER=quay.io/opendatahub/ds-pipelines-driver:latest IMAGES_MLMDGRPC=quay.io/opendatahub/mlmd-grpc-server:latest +IMAGES_PIPELINESRUNTIMEGENERIC=quay.io/opendatahub/ds-pipelines-runtime-generic:latest +IMAGES_TOOLBOX=registry.redhat.io/ubi9/toolbox@sha256:da31dee8904a535d12689346e65e5b00d11a6179abf1fa69b548dbd755fa2770 +IMAGES_RHELAI=registry.redhat.io/rhelai1/instructlab-nvidia-rhel9@sha256:05cfba1fb13ed54b1de4d021da2a31dd78ba7d8cc48e10c7fe372815899a18ae IMAGES_MLMDENVOY=registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:b30d60cd458133430d4c92bf84911e03cecd02f60e88a58d1c6c003543cf833a 
IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:f0ee0d27bb784e289f7d88cc8ee0e085ca70e88a5d126562105542f259a1ac01 IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:8ce44de8c683f198bf24ba36cd17e89708153d11f5b42c0a27e77f8fdb233551 diff --git a/config/configmaps/files/config.yaml b/config/configmaps/files/config.yaml index 8e0feeb56..b334582d8 100644 --- a/config/configmaps/files/config.yaml +++ b/config/configmaps/files/config.yaml @@ -10,6 +10,9 @@ Images: DriverImage: $(IMAGES_DRIVER) OAuthProxy: $(IMAGES_OAUTHPROXY) MariaDB: $(IMAGES_MARIADB) + RuntimeGeneric: $(IMAGES_PIPELINESRUNTIMEGENERIC) + Toolbox: $(IMAGES_TOOLBOX) + RHELAI: $(IMAGES_RHELAI) DSPO: HealthCheck: Database: diff --git a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml index 380aaeb5c..e45c1d8ba 100644 --- a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml +++ b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml @@ -41,8 +41,10 @@ spec: description: DS Pipelines API Server configuration. properties: argoDriverImage: + description: Driver image used during pipeline execution. type: string argoLauncherImage: + description: Launcher/Executor image used during pipeline execution. type: string artifactSignedURLExpirySeconds: default: 60 @@ -153,6 +155,17 @@ spec: x-kubernetes-int-or-string: true type: object type: object + rhelAIImage: + description: RhelAI image used for ilab tasks in managed pipelines. + type: string + runtimeGenericImage: + description: Generic runtime image used for building managed pipelines + during api server init, and for basic runtime operations. + type: string + toolboxImage: + description: Toolbox image used for basic container spec runtime + operations in managed pipelines. 
+ type: string type: object database: default: @@ -894,8 +907,10 @@ spec: removed in the future.' type: boolean argoDriverImage: + description: Driver image used during pipeline execution. type: string argoLauncherImage: + description: Launcher/Executor image used during pipeline execution. type: string artifactImage: description: 'Deprecated: DSP V1 only, will be removed in the @@ -1049,6 +1064,13 @@ spec: x-kubernetes-int-or-string: true type: object type: object + rhelAIImage: + description: RhelAI image used for ilab tasks in managed pipelines. + type: string + runtimeGenericImage: + description: Generic runtime image used for building managed pipelines + during api server init, and for basic runtime operations. + type: string stripEOF: default: true description: 'Default: true Deprecated: DSP V1 only, will be removed @@ -1064,6 +1086,10 @@ spec: - StoppedRunFinally - CancelledRunFinally type: string + toolboxImage: + description: Toolbox image used for basic container spec runtime + operations in managed pipelines. 
+ type: string trackArtifacts: default: true description: 'Default: true Deprecated: DSP V1 only, will be removed diff --git a/config/internal/apiserver/default/deployment.yaml.tmpl b/config/internal/apiserver/default/deployment.yaml.tmpl index ee555d763..a25496508 100644 --- a/config/internal/apiserver/default/deployment.yaml.tmpl +++ b/config/internal/apiserver/default/deployment.yaml.tmpl @@ -20,6 +20,32 @@ spec: component: data-science-pipelines dspa: {{.Name}} spec: + initContainers: + - name: init-pipelines + image: {{.APIServer.RuntimeGenericImage}} + workingDir: /opt/app-root/src/pipelines/distributed-ilab + command: [ '/bin/sh', '-c'] + args: + - "make pipeline && cp pipeline.yaml ${BUILD_FOLDER}" + env: + - name: BUILD_FOLDER + value: /opt/app-root/src/build + - name: PYTHON_IMAGE + value: {{.APIServer.RuntimeGenericImage}} + - name: TOOLBOX_IMAGE + value: {{.APIServer.ToolboxImage}} + - name: RHELAI_IMAGE + value: {{.APIServer.RHELAIImage}} + resources: + limits: + memory: 200Mi + cpu: '1' + requests: + memory: 200Mi + cpu: '1' + volumeMounts: + - mountPath: /opt/app-root/src/build + name: managed-pipelines containers: - env: {{ if .IncludeOwnerReference }} @@ -202,6 +228,8 @@ spec: - name: server-config mountPath: /config/config.json subPath: {{ .APIServer.CustomServerConfig.Key }} + - mountPath: /config/managed-pipelines + name: managed-pipelines {{ if .PodToPodTLS }} - mountPath: /etc/tls/private name: proxy-tls @@ -282,6 +310,9 @@ spec: - name: server-config configMap: name: {{ .APIServer.CustomServerConfig.Name }} + - name: managed-pipelines + emptyDir: + sizeLimit: 10Mi {{ if .CustomCABundle }} - name: ca-bundle configMap: diff --git a/config/manager/manager.yaml b/config/manager/manager.yaml index 484d6284d..281407679 100644 --- a/config/manager/manager.yaml +++ b/config/manager/manager.yaml @@ -59,6 +59,12 @@ spec: value: $(IMAGES_OAUTHPROXY) - name: IMAGES_MARIADB value: $(IMAGES_MARIADB) + - name: IMAGES_RUNTIMEGENERIC + value: 
$(IMAGES_PIPELINESRUNTIMEGENERIC) + - name: IMAGES_TOOLBOX + value: $(IMAGES_TOOLBOX) + - name: IMAGES_RHELAI + value: $(IMAGES_RHELAI) - name: ZAP_LOG_LEVEL value: $(ZAP_LOG_LEVEL) - name: MAX_CONCURRENT_RECONCILES diff --git a/controllers/config/defaults.go b/controllers/config/defaults.go index 67eedd43b..a473d0b25 100644 --- a/controllers/config/defaults.go +++ b/controllers/config/defaults.go @@ -100,6 +100,9 @@ const ( ArgoWorkflowControllerImagePath = "Images.ArgoWorkflowController" MariaDBImagePath = "Images.MariaDB" OAuthProxyImagePath = "Images.OAuthProxy" + RuntimeGenericPath = "Images.RuntimeGeneric" + ToolboxImagePath = "Images.Toolbox" + RHELAIImagePath = "Images.RHELAI" // Other configs ObjStoreConnectionTimeoutConfigName = "DSPO.HealthCheck.ObjectStore.ConnectionTimeout" diff --git a/controllers/dspipeline_params.go b/controllers/dspipeline_params.go index 4efdbe4af..2a4f3943b 100644 --- a/controllers/dspipeline_params.go +++ b/controllers/dspipeline_params.go @@ -595,10 +595,17 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip serverImageFromConfig := config.GetStringConfigWithDefault(config.APIServerImagePath, config.DefaultImageValue) argoLauncherImageFromConfig := config.GetStringConfigWithDefault(config.LauncherImagePath, config.DefaultImageValue) argoDriverImageFromConfig := config.GetStringConfigWithDefault(config.DriverImagePath, config.DefaultImageValue) + runtimeGenericImageFromConfig := config.GetStringConfigWithDefault(config.RuntimeGenericPath, config.DefaultImageValue) + toolboxImageFromConfig := config.GetStringConfigWithDefault(config.ToolboxImagePath, config.DefaultImageValue) + rhelAIImageFromConfig := config.GetStringConfigWithDefault(config.RHELAIImagePath, config.DefaultImageValue) setStringDefault(serverImageFromConfig, &p.APIServer.Image) setStringDefault(argoLauncherImageFromConfig, &p.APIServer.ArgoLauncherImage) setStringDefault(argoDriverImageFromConfig, &p.APIServer.ArgoDriverImage) + 
setStringDefault(runtimeGenericImageFromConfig, &p.APIServer.RuntimeGenericImage) + setStringDefault(toolboxImageFromConfig, &p.APIServer.ToolboxImage) + setStringDefault(rhelAIImageFromConfig, &p.APIServer.RHELAIImage) + setResourcesDefault(config.APIServerResourceRequirements, &p.APIServer.Resources) if p.APIServer.CustomServerConfig == nil { diff --git a/controllers/testdata/declarative/case_0/config.yaml b/controllers/testdata/declarative/case_0/config.yaml index 6f35e7975..06225a2d7 100644 --- a/controllers/testdata/declarative/case_0/config.yaml +++ b/controllers/testdata/declarative/case_0/config.yaml @@ -13,6 +13,9 @@ Images: MariaDB: mariadb:test0 MlPipelineUI: frontend:test0 Minio: minio:test0 + RuntimeGeneric: runtimegeneric:test0 + Toolbox: toolbox:test0 + RHELAI: rhelai:test0 DSPO: ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml index 4a9529835..8a571c56e 100644 --- a/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml @@ -22,6 +22,32 @@ spec: component: data-science-pipelines dspa: testdsp0 spec: + initContainers: + - name: init-pipelines + image: runtimegeneric:test0 + workingDir: /opt/app-root/src/pipelines/distributed-ilab + command: ['/bin/sh', '-c'] + args: + - "make pipeline && cp pipeline.yaml ${BUILD_FOLDER}" + env: + - name: BUILD_FOLDER + value: /opt/app-root/src/build + - name: PYTHON_IMAGE + value: runtimegeneric:test0 + - name: TOOLBOX_IMAGE + value: toolbox:test0 + - name: RHELAI_IMAGE + value: rhelai:test0 + resources: + limits: + memory: 200Mi + cpu: '1' + requests: + memory: 200Mi + cpu: '1' + volumeMounts: + - mountPath: /opt/app-root/src/build + name: managed-pipelines containers: - env: - name: POD_NAMESPACE @@ -150,6 +176,8 @@ spec: - name: 
server-config mountPath: /config/config.json subPath: config.json + - mountPath: /config/managed-pipelines + name: managed-pipelines - mountPath: /config/sample_config.json name: sample-config subPath: sample_config.json @@ -211,6 +239,9 @@ spec: configMap: defaultMode: 420 name: ds-pipeline-server-config-testdsp0 + - name: managed-pipelines + emptyDir: + sizeLimit: 10Mi - configMap: defaultMode: 420 name: sample-config-testdsp0 diff --git a/controllers/testdata/declarative/case_1/config.yaml b/controllers/testdata/declarative/case_1/config.yaml index 1c0e4cfbc..c1f31d850 100644 --- a/controllers/testdata/declarative/case_1/config.yaml +++ b/controllers/testdata/declarative/case_1/config.yaml @@ -12,3 +12,6 @@ Images: MariaDB: mariadb:test1 MlPipelineUI: frontend:test1 Minio: minio:test1 + RuntimeGeneric: runtimegeneric:test1 + Toolbox: toolbox:test1 + RHELAI: rhelai:test1 diff --git a/controllers/testdata/declarative/case_2/config.yaml b/controllers/testdata/declarative/case_2/config.yaml index 1929d99a8..f9271d908 100644 --- a/controllers/testdata/declarative/case_2/config.yaml +++ b/controllers/testdata/declarative/case_2/config.yaml @@ -12,7 +12,9 @@ Images: MariaDB: mariadb:test2 MlPipelineUI: frontend:test2 Minio: minio:test2 - + RuntimeGeneric: runtimegeneric:test2 + Toolbox: toolbox:test2 + RHELAI: rhelai:test2 DSPO: ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml index 66c7e8332..a158666bd 100644 --- a/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml @@ -22,6 +22,32 @@ spec: component: data-science-pipelines dspa: testdsp2 spec: + initContainers: + - name: init-pipelines + image: runtimegeneric:test2 + workingDir: /opt/app-root/src/pipelines/distributed-ilab + 
command: ['/bin/sh', '-c'] + args: + - "make pipeline && cp pipeline.yaml ${BUILD_FOLDER}" + env: + - name: BUILD_FOLDER + value: /opt/app-root/src/build + - name: PYTHON_IMAGE + value: runtimegeneric:test2 + - name: TOOLBOX_IMAGE + value: toolbox:test2 + - name: RHELAI_IMAGE + value: rhelai:test2 + resources: + limits: + memory: 200Mi + cpu: '1' + requests: + memory: 200Mi + cpu: '1' + volumeMounts: + - mountPath: /opt/app-root/src/build + name: managed-pipelines containers: - env: - name: POD_NAMESPACE @@ -150,6 +176,8 @@ spec: - name: server-config mountPath: /config/config.json subPath: testserverconfigmapkeydspa2 + - mountPath: /config/managed-pipelines + name: managed-pipelines - mountPath: /config/sample_config.json name: sample-config subPath: sample_config.json @@ -211,6 +239,9 @@ spec: configMap: defaultMode: 420 name: testserverconfigmapdspa2 + - name: managed-pipelines + emptyDir: + sizeLimit: 10Mi - configMap: defaultMode: 420 name: sample-config-testdsp2 diff --git a/controllers/testdata/declarative/case_3/config.yaml b/controllers/testdata/declarative/case_3/config.yaml index bf6a27e12..747ee1b4a 100644 --- a/controllers/testdata/declarative/case_3/config.yaml +++ b/controllers/testdata/declarative/case_3/config.yaml @@ -12,6 +12,9 @@ Images: MariaDB: mariadb:test3 MlPipelineUI: frontend:test3 Minio: minio:test3 + RuntimeGeneric: runtimegeneric:test3 + Toolbox: toolbox:test3 + RHELAI: rhelai:test3 DSPO: ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml index 8646f1eac..61af0ffb0 100644 --- a/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml @@ -22,6 +22,32 @@ spec: component: data-science-pipelines dspa: testdsp3 spec: + initContainers: + - name: init-pipelines + 
image: runtimegeneric:test3 + workingDir: /opt/app-root/src/pipelines/distributed-ilab + command: ['/bin/sh', '-c'] + args: + - "make pipeline && cp pipeline.yaml ${BUILD_FOLDER}" + env: + - name: BUILD_FOLDER + value: /opt/app-root/src/build + - name: PYTHON_IMAGE + value: runtimegeneric:test3 + - name: TOOLBOX_IMAGE + value: toolbox:test3 + - name: RHELAI_IMAGE + value: rhelai:test3 + resources: + limits: + memory: 200Mi + cpu: '1' + requests: + memory: 200Mi + cpu: '1' + volumeMounts: + - mountPath: /opt/app-root/src/build + name: managed-pipelines containers: - env: - name: POD_NAMESPACE @@ -142,6 +168,8 @@ spec: - name: server-config mountPath: /config/config.json subPath: config.json + - mountPath: /config/managed-pipelines + name: managed-pipelines resources: requests: cpu: 250m @@ -205,4 +233,7 @@ spec: configMap: name: ds-pipeline-server-config-testdsp3 defaultMode: 420 + - name: managed-pipelines + emptyDir: + sizeLimit: 10Mi serviceAccountName: ds-pipeline-testdsp3 diff --git a/controllers/testdata/declarative/case_4/config.yaml b/controllers/testdata/declarative/case_4/config.yaml index b7e8a2aef..c8eafce57 100644 --- a/controllers/testdata/declarative/case_4/config.yaml +++ b/controllers/testdata/declarative/case_4/config.yaml @@ -12,6 +12,9 @@ Images: MariaDB: mariadb:test4 MlPipelineUI: frontend:test4 Minio: minio:test4 + RuntimeGeneric: runtimegeneric:test4 + Toolbox: toolbox:test4 + RHELAI: rhelai:test4 DSPO: ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml index b0474ff87..fe3b87d0f 100644 --- a/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml @@ -22,6 +22,32 @@ spec: component: data-science-pipelines dspa: testdsp4 spec: + initContainers: + - name: 
init-pipelines + image: runtimegeneric:test0 + workingDir: /opt/app-root/src/pipelines/distributed-ilab + command: ['/bin/sh', '-c'] + args: + - "make pipeline && cp pipeline.yaml ${BUILD_FOLDER}" + env: + - name: BUILD_FOLDER + value: /opt/app-root/src/build + - name: PYTHON_IMAGE + value: runtimegeneric:test4 + - name: TOOLBOX_IMAGE + value: toolbox:test4 + - name: RHELAI_IMAGE + value: rhelai:test4 + resources: + limits: + memory: 200Mi + cpu: '1' + requests: + memory: 200Mi + cpu: '1' + volumeMounts: + - mountPath: /opt/app-root/src/build + name: managed-pipelines containers: - env: - name: POD_NAMESPACE @@ -149,6 +175,8 @@ spec: - mountPath: /config/config.json name: server-config subPath: config.json + - mountPath: /config/managed-pipelines + name: managed-pipelines - name: oauth-proxy args: - --https-address=:8443 @@ -205,4 +233,7 @@ spec: configMap: defaultMode: 420 name: ds-pipeline-server-config-testdsp4 + - name: managed-pipelines + emptyDir: + sizeLimit: 10Mi serviceAccountName: ds-pipeline-testdsp4 diff --git a/controllers/testdata/declarative/case_5/config.yaml b/controllers/testdata/declarative/case_5/config.yaml index 943a935e4..1216f5c6f 100644 --- a/controllers/testdata/declarative/case_5/config.yaml +++ b/controllers/testdata/declarative/case_5/config.yaml @@ -12,6 +12,9 @@ Images: MariaDB: mariadb:test5 MlPipelineUI: frontend:test5 Minio: minio:test5 + RuntimeGeneric: runtimegeneric:test5 + Toolbox: toolbox:test5 + RHELAI: rhelai:test5 DSPO: ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml index 03a74eeba..e4abd60b5 100644 --- a/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml @@ -22,6 +22,32 @@ spec: component: data-science-pipelines dspa: testdsp5 spec: 
+ initContainers: + - name: init-pipelines + image: runtimegeneric:test0 + workingDir: /opt/app-root/src/pipelines/distributed-ilab + command: ['/bin/sh', '-c'] + args: + - "make pipeline && cp pipeline.yaml ${BUILD_FOLDER}" + env: + - name: BUILD_FOLDER + value: /opt/app-root/src/build + - name: PYTHON_IMAGE + value: runtimegeneric:test5 + - name: TOOLBOX_IMAGE + value: toolbox:test5 + - name: RHELAI_IMAGE + value: rhelai:test5 + resources: + limits: + memory: 200Mi + cpu: '1' + requests: + memory: 200Mi + cpu: '1' + volumeMounts: + - mountPath: /opt/app-root/src/build + name: managed-pipelines containers: - env: - name: POD_NAMESPACE @@ -149,6 +175,8 @@ spec: - name: server-config mountPath: /config/config.json subPath: config.json + - mountPath: /config/managed-pipelines + name: managed-pipelines - mountPath: /etc/tls/private name: proxy-tls - name: ca-bundle @@ -210,6 +238,9 @@ spec: configMap: name: ds-pipeline-server-config-testdsp5 defaultMode: 420 + - name: managed-pipelines + emptyDir: + sizeLimit: 10Mi - name: ca-bundle configMap: name: dsp-trusted-ca-testdsp5 diff --git a/controllers/testdata/declarative/case_6/config.yaml b/controllers/testdata/declarative/case_6/config.yaml index 32e5951c4..6d26a7246 100644 --- a/controllers/testdata/declarative/case_6/config.yaml +++ b/controllers/testdata/declarative/case_6/config.yaml @@ -12,6 +12,9 @@ Images: MariaDB: mariadb:test6 MlPipelineUI: frontend:test6 Minio: minio:test6 + RuntimeGeneric: runtimegeneric:test6 + Toolbox: toolbox:test6 + RHELAI: rhelai:test6 DSPO: ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml index f4809db80..37b446fc9 100644 --- a/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml @@ -22,6 +22,32 @@ 
spec: component: data-science-pipelines dspa: testdsp6 spec: + initContainers: + - name: init-pipelines + image: runtimegeneric:test0 + workingDir: /opt/app-root/src/pipelines/distributed-ilab + command: ['/bin/sh', '-c'] + args: + - "make pipeline && cp pipeline.yaml ${BUILD_FOLDER}" + env: + - name: BUILD_FOLDER + value: /opt/app-root/src/build + - name: PYTHON_IMAGE + value: runtimegeneric:test6 + - name: TOOLBOX_IMAGE + value: toolbox:test6 + - name: RHELAI_IMAGE + value: rhelai:test6 + resources: + limits: + memory: 200Mi + cpu: '1' + requests: + memory: 200Mi + cpu: '1' + volumeMounts: + - mountPath: /opt/app-root/src/build + name: managed-pipelines containers: - env: - name: POD_NAMESPACE @@ -135,6 +161,8 @@ spec: - name: server-config mountPath: /config/config.json subPath: config.json + - mountPath: /config/managed-pipelines + name: managed-pipelines - name: oauth-proxy args: - --https-address=:8443 @@ -191,4 +219,7 @@ spec: configMap: name: ds-pipeline-server-config-testdsp6 defaultMode: 420 + - name: managed-pipelines + emptyDir: + sizeLimit: 10Mi serviceAccountName: ds-pipeline-testdsp6 diff --git a/docs/release/compatibility.md b/docs/release/compatibility.md index a73aad8d9..314a19a88 100644 --- a/docs/release/compatibility.md +++ b/docs/release/compatibility.md @@ -6,6 +6,7 @@ Each row outlines the versions for individual subcomponents and images that are | dsp | kfp | argo | ml-metadata | envoy | ocp-pipelines | oauth-proxy | mariadb-103 | ubi-minimal | ubi-micro | openshift | |-----|-----|-----|-----|-----|-----|-----|-----|-----|-----|-----| +| 2.10 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 4.15,4.16,4.17 | | 2.9 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 4.15,4.16,4.17 | | 2.8 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 4.15,4.16,4.17 | | 2.7 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.10 | 1 | N/A | N/A | 4.15,4.16,4.17 | @@ -17,13 +18,13 @@ Each row outlines the 
versions for individual subcomponents and images that are | 2.1 | 2.0.5 | 3.3.10 | 1.14.0 | 1.22.11 | N/A | v4.10 | 1 | N/A | N/A | 4.13,4.14,4.15 | | 2.0 | 2.0.5 | 3.3.10 | 1.14.0 | 1.22.11 | N/A | v4.10 | 1 | N/A | N/A | 4.12,4.13,4.14 | | 2.0-alpha-1 | 2.0.5 | 3.3.10 | 1.14.0 | 1.12.2 | N/A | v4.10 | 1 | N/A | N/A | 4.12,4.13,4.14 | -| 1.6 | 1.5.1 (tktn) | None | 1.5.0 | 1.8.4 | 1.8 | v4.10 | 1 | 8.8 | 8.8 | 4.11,4.12,4.13 | -| 1.5 | 1.5.1 (tktn) | None | 1.5.0 | 1.8.4 | 1.8 | v4.10 | 1 | 8.8 | 8.8 | 4.11,4.12,4.13 | -| 1.4 | 1.5.1 (tktn) | None | 1.5.0 | 1.8.4 | 1.8 | v4.10 | 1 | 8.8 | 8.8 | 4.11,4.12,4.13 | -| 1.3 | 1.5.1 (tktn) | None | 1.5.0 | 1.8.4 | 1.8 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | -| 1.2 | 1.5.1 (tktn) | None | 1.5.0 | 1.8.4 | 1.8 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | -| 1.1 | 1.5.1 (tktn) | None | 1.5.0 | 1.8.4 | 1.8 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | -| 1.0 | 1.5.1 (tktn) | None | 1.5.0 | 1.8.4 | 1.8 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | +| 1.6 | 1.5.1 (tktn) | N/A | 1.5.0 | 1.8.4 | 1.8 | v4.10 | 1 | 8.8 | 8.8 | 4.11,4.12,4.13 | +| 1.5 | 1.5.1 (tktn) | N/A | 1.5.0 | 1.8.4 | 1.8 | v4.10 | 1 | 8.8 | 8.8 | 4.11,4.12,4.13 | +| 1.4 | 1.5.1 (tktn) | N/A | 1.5.0 | 1.8.4 | 1.8 | v4.10 | 1 | 8.8 | 8.8 | 4.11,4.12,4.13 | +| 1.3 | 1.5.1 (tktn) | N/A | 1.5.0 | 1.8.4 | 1.8 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | +| 1.2 | 1.5.1 (tktn) | N/A | 1.5.0 | 1.8.4 | 1.8 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | +| 1.1 | 1.5.1 (tktn) | N/A | 1.5.0 | 1.8.4 | 1.8 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | +| 1.0 | 1.5.1 (tktn) | N/A | 1.5.0 | 1.8.4 | 1.8 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | diff --git a/docs/release/compatibility.yaml b/docs/release/compatibility.yaml index 8d78a7b62..fdf9e2195 100644 --- a/docs/release/compatibility.yaml +++ b/docs/release/compatibility.yaml @@ -1,198 +1,209 @@ -- dsp: 2.9 - kfp: 2.2.0 - argo: 3.4.17 - ml-metadata: 1.14.0 - envoy: 1.22.11 - ocp-pipelines: "N/A" - oauth-proxy: v4.14 - mariadb-103: 1 - 
ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.15,4.16,4.17 -- dsp: 2.8 - kfp: 2.2.0 - argo: 3.4.17 - ml-metadata: 1.14.0 - envoy: 1.22.11 - ocp-pipelines: "N/A" - oauth-proxy: v4.14 - mariadb-103: 1 - ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.15,4.16,4.17 -- dsp: 2.7 - kfp: 2.2.0 - argo: 3.4.17 - ml-metadata: 1.14.0 - envoy: 1.22.11 - ocp-pipelines: "N/A" - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.15,4.16,4.17 -- dsp: 2.6 - kfp: 2.0.5 - argo: 3.3.10 - ml-metadata: 1.14.0 - envoy: 1.22.11 - ocp-pipelines: "N/A" - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.14,4.15,4.16 -- dsp: 2.5 - kfp: 2.0.5 - argo: 3.3.10 - ml-metadata: 1.14.0 - envoy: 1.22.11 - ocp-pipelines: "N/A" - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.14,4.15,4.16 -- dsp: 2.4 - kfp: 2.0.5 - argo: 3.3.10 - ml-metadata: 1.14.0 - envoy: 1.22.11 - ocp-pipelines: "N/A" - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.14,4.15,4.16 -- dsp: 2.3 - kfp: 2.0.5 - argo: 3.3.10 - ml-metadata: 1.14.0 - envoy: 1.22.11 - ocp-pipelines: "N/A" - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.13,4.14,4.15 -- dsp: 2.2 - kfp: 2.0.5 - argo: 3.3.10 - ml-metadata: 1.14.0 - envoy: 1.22.11 - ocp-pipelines: "N/A" - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.13,4.14,4.15 -- dsp: 2.1 - kfp: 2.0.5 - argo: 3.3.10 - ml-metadata: 1.14.0 - envoy: 1.22.11 - ocp-pipelines: "N/A" - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.13,4.14,4.15 -- dsp: 2.0 - kfp: 2.0.5 - argo: 3.3.10 - ml-metadata: 1.14.0 - envoy: 1.22.11 - ocp-pipelines: "N/A" - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.12,4.13,4.14 -- dsp: 2.0-alpha-1 - kfp: 2.0.5 - argo: 3.3.10 - 
ml-metadata: 1.14.0 - envoy: 1.12.2 - ocp-pipelines: "N/A" - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: "N/A" - ubi-micro: "N/A" - openshift: 4.12,4.13,4.14 -- dsp: 1.6 - kfp: 1.5.1 (tktn) - argo: - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: 1.8 - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.11,4.12,4.13 -- dsp: 1.5 - kfp: 1.5.1 (tktn) - argo: - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: 1.8 - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.11,4.12,4.13 -- dsp: 1.4 - kfp: 1.5.1 (tktn) - argo: - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: 1.8 - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.11,4.12,4.13 -- dsp: 1.3 - kfp: 1.5.1 (tktn) - argo: - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: 1.8 - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.10,4.11,4.12 -- dsp: 1.2 - kfp: 1.5.1 (tktn) - argo: - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: 1.8 - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.10,4.11,4.12 -- dsp: 1.1 - kfp: 1.5.1 (tktn) - argo: - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: 1.8 - oauth-proxy: v4.12 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.10,4.11,4.12 -- dsp: 1.0 - kfp: 1.5.1 (tktn) - argo: - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: 1.8 - oauth-proxy: v4.12 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.10,4.11,4.12 +- dsp: '2.10' + kfp: '2.2.0' + argo: '3.4.17' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.14' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.15,4.16,4.17' +- dsp: '2.9' + kfp: '2.2.0' + argo: '3.4.17' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.14' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: 
'4.15,4.16,4.17' +- dsp: '2.8' + kfp: '2.2.0' + argo: '3.4.17' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.14' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.15,4.16,4.17' +- dsp: '2.7' + kfp: '2.2.0' + argo: '3.4.17' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.15,4.16,4.17' +- dsp: '2.6' + kfp: '2.0.5' + argo: '3.3.10' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.14,4.15,4.16' +- dsp: '2.5' + kfp: '2.0.5' + argo: '3.3.10' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.14,4.15,4.16' +- dsp: '2.4' + kfp: '2.0.5' + argo: '3.3.10' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.14,4.15,4.16' +- dsp: '2.3' + kfp: '2.0.5' + argo: '3.3.10' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.13,4.14,4.15' +- dsp: '2.2' + kfp: '2.0.5' + argo: '3.3.10' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.13,4.14,4.15' +- dsp: '2.1' + kfp: '2.0.5' + argo: '3.3.10' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.13,4.14,4.15' +- dsp: '2.0' + kfp: '2.0.5' + argo: '3.3.10' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 
'N/A' + openshift: '4.12,4.13,4.14' +- dsp: '2.0-alpha-1' + kfp: '2.0.5' + argo: '3.3.10' + ml-metadata: '1.14.0' + envoy: '1.12.2' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.12,4.13,4.14' +- dsp: '1.6' + kfp: '1.5.1 (tktn)' + argo: 'N/A' + ml-metadata: '1.5.0' + envoy: '1.8.4' + ocp-pipelines: '1.8' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: '8.8' + ubi-micro: '8.8' + openshift: '4.11,4.12,4.13' +- dsp: '1.5' + kfp: '1.5.1 (tktn)' + argo: 'N/A' + ml-metadata: '1.5.0' + envoy: '1.8.4' + ocp-pipelines: '1.8' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: '8.8' + ubi-micro: '8.8' + openshift: '4.11,4.12,4.13' +- dsp: '1.4' + kfp: '1.5.1 (tktn)' + argo: 'N/A' + ml-metadata: '1.5.0' + envoy: '1.8.4' + ocp-pipelines: '1.8' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: '8.8' + ubi-micro: '8.8' + openshift: '4.11,4.12,4.13' +- dsp: '1.3' + kfp: '1.5.1 (tktn)' + argo: 'N/A' + ml-metadata: '1.5.0' + envoy: '1.8.4' + ocp-pipelines: '1.8' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: '8.8' + ubi-micro: '8.8' + openshift: '4.10,4.11,4.12' +- dsp: '1.2' + kfp: '1.5.1 (tktn)' + argo: 'N/A' + ml-metadata: '1.5.0' + envoy: '1.8.4' + ocp-pipelines: '1.8' + oauth-proxy: 'v4.10' + mariadb-103: '1' + ubi-minimal: '8.8' + ubi-micro: '8.8' + openshift: '4.10,4.11,4.12' +- dsp: '1.1' + kfp: '1.5.1 (tktn)' + argo: 'N/A' + ml-metadata: '1.5.0' + envoy: '1.8.4' + ocp-pipelines: '1.8' + oauth-proxy: 'v4.12' + mariadb-103: '1' + ubi-minimal: '8.8' + ubi-micro: '8.8' + openshift: '4.10,4.11,4.12' +- dsp: '1.0' + kfp: '1.5.1 (tktn)' + argo: 'N/A' + ml-metadata: '1.5.0' + envoy: '1.8.4' + ocp-pipelines: '1.8' + oauth-proxy: 'v4.12' + mariadb-103: '1' + ubi-minimal: '8.8' + ubi-micro: '8.8' + openshift: '4.10,4.11,4.12' diff --git a/scripts/release/params.py b/scripts/release/params.py index 4a44437c3..f75c62310 100644 --- a/scripts/release/params.py +++ 
b/scripts/release/params.py @@ -12,6 +12,7 @@ "IMAGES_SCHEDULEDWORKFLOW": "ds-pipelines-scheduledworkflow", "IMAGES_LAUNCHER": "ds-pipelines-launcher", "IMAGES_DRIVER": "ds-pipelines-driver", + "IMAGES_PIPELINESRUNTIMEGENERIC": "ds-pipelines-runtime-generic", } TAGGED_REPOS = { @@ -33,6 +34,8 @@ "IMAGES_MLMDENVOY": "registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:02b834fd74da71ec37f6a5c0d10aac9a679d1a0f4e510c4f77723ef2367e858a", "IMAGES_MARIADB": "registry.redhat.io/rhel8/mariadb-103@sha256:3d30992e60774f887c4e7959c81b0c41b0d82d042250b3b56f05ab67fd4cdee1", "IMAGES_OAUTHPROXY": "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:4f8d66597feeb32bb18699326029f9a71a5aca4a57679d636b876377c2e95695", + "IMAGES_TOOLBOX": "registry.redhat.io/ubi9/toolbox@sha256:da31dee8904a535d12689346e65e5b00d11a6179abf1fa69b548dbd755fa2770", + "IMAGES_RHELAI": "registry.redhat.io/rhelai1/instructlab-nvidia-rhel9@sha256:05cfba1fb13ed54b1de4d021da2a31dd78ba7d8cc48e10c7fe372815899a18ae", } OTHER_OPTIONS = { From 823a012da806ba960549886b95c2436ac6418d2b Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Tue, 4 Feb 2025 18:01:07 -0500 Subject: [PATCH 03/14] parameterize init resources Signed-off-by: Humair Khan --- .github/workflows/build-tags.yml | 4 +- api/v1/dspipeline_types.go | 3 + api/v1/zz_generated.deepcopy.go | 5 ++ api/v1alpha1/dspipeline_types.go | 3 + api/v1alpha1/zz_generated.deepcopy.go | 5 ++ ...b.io_datasciencepipelinesapplications.yaml | 72 +++++++++++++++++++ .../apiserver/default/deployment.yaml.tmpl | 22 ++++-- controllers/config/defaults.go | 1 + controllers/dspipeline_params.go | 1 + .../created/apiserver_deployment.yaml | 8 +-- .../declarative/case_2/deploy/cr.yaml | 7 ++ .../created/apiserver_deployment.yaml | 10 +-- .../created/apiserver_deployment.yaml | 10 +-- .../created/apiserver_deployment.yaml | 10 +-- .../created/apiserver_deployment.yaml | 10 +-- .../created/apiserver_deployment.yaml | 10 +-- controllers/testutil/equalities.go | 29 ++++++-- 
tests/resources/dspa-external-lite.yaml | 7 ++ tests/resources/dspa-external.yaml | 7 ++ 19 files changed, 183 insertions(+), 41 deletions(-) diff --git a/.github/workflows/build-tags.yml b/.github/workflows/build-tags.yml index 73cd4829d..ec16412ba 100644 --- a/.github/workflows/build-tags.yml +++ b/.github/workflows/build-tags.yml @@ -90,7 +90,7 @@ env: IMAGE_REPO_SWF: ds-pipelines-scheduledworkflow IMAGE_REPO_LAUNCHER: ds-pipelines-launcher IMAGE_REPO_DRIVER: ds-pipelines-driver - IMAGE_REPO_PIELINES_RUNTIME_GENERIC: ds-pipelines-runtime-generic + IMAGE_REPO_PIPELINES_RUNTIME_GENERIC: ds-pipelines-runtime-generic SOURCE_DSPO_BRANCH: ${{ inputs.src_dspo_branch }} SOURCE_DSP_BRANCH: ${{ inputs.src_dsp_branch }} SOURCE_DSP_PIPELINES_BRANCH: ${{ inputs.src_pipelines_branch }} @@ -223,7 +223,7 @@ jobs: env: SOURCE_BRANCH: ${{ env.SOURCE_DSP_PIPELINES_BRANCH }} with: - IMAGE_REPO: ${{ env.IMAGE_REPO_PIELINES_RUNTIME_GENERIC }} + IMAGE_REPO: ${{ env.IMAGE_REPO_PIPELINES_RUNTIME_GENERIC }} DOCKERFILE: Dockerfile GH_REPO: ${{ inputs.pipelines_org_repo }} OVERWRITE: ${{ env.OVERWRITE_IMAGES }} diff --git a/api/v1/dspipeline_types.go b/api/v1/dspipeline_types.go index adc383720..47536617f 100644 --- a/api/v1/dspipeline_types.go +++ b/api/v1/dspipeline_types.go @@ -84,6 +84,9 @@ type APIServer struct { RHELAIImage string `json:"rhelAIImage,omitempty"` // Specify custom Pod resource requirements for this component. Resources *ResourceRequirements `json:"resources,omitempty"` + // Specify init container resource requirements. The init container + // is used to build managed-pipelines and store them in a shared volume. 
+ InitResources *ResourceRequirements `json:"initResources,omitempty"` // If the Object store/DB is behind a TLS secured connection that is // unrecognized by the host OpenShift/K8s cluster, then you can diff --git a/api/v1/zz_generated.deepcopy.go b/api/v1/zz_generated.deepcopy.go index f3788e5f1..ce72ec3c2 100644 --- a/api/v1/zz_generated.deepcopy.go +++ b/api/v1/zz_generated.deepcopy.go @@ -34,6 +34,11 @@ func (in *APIServer) DeepCopyInto(out *APIServer) { *out = new(ResourceRequirements) (*in).DeepCopyInto(*out) } + if in.InitResources != nil { + in, out := &in.InitResources, &out.InitResources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } if in.CABundle != nil { in, out := &in.CABundle, &out.CABundle *out = new(CABundle) diff --git a/api/v1alpha1/dspipeline_types.go b/api/v1alpha1/dspipeline_types.go index ff07fc70c..00035a617 100644 --- a/api/v1alpha1/dspipeline_types.go +++ b/api/v1alpha1/dspipeline_types.go @@ -84,6 +84,9 @@ type APIServer struct { RHELAIImage string `json:"rhelAIImage,omitempty"` // Specify custom Pod resource requirements for this component. Resources *ResourceRequirements `json:"resources,omitempty"` + // Specify init container resource requirements. The init container + // is used to build managed-pipelines and store them in a shared volume. 
+ InitResources *ResourceRequirements `json:"initResources,omitempty"` // If the Object store/DB is behind a TLS secured connection that is // unrecognized by the host OpenShift/K8s cluster, then you can diff --git a/api/v1alpha1/zz_generated.deepcopy.go b/api/v1alpha1/zz_generated.deepcopy.go index ac5fa0ceb..664b56a23 100644 --- a/api/v1alpha1/zz_generated.deepcopy.go +++ b/api/v1alpha1/zz_generated.deepcopy.go @@ -34,6 +34,11 @@ func (in *APIServer) DeepCopyInto(out *APIServer) { *out = new(ResourceRequirements) (*in).DeepCopyInto(*out) } + if in.InitResources != nil { + in, out := &in.InitResources, &out.InitResources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } if in.CABundle != nil { in, out := &in.CABundle, &out.CABundle *out = new(CABundle) diff --git a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml index e45c1d8ba..c4802251f 100644 --- a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml +++ b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml @@ -120,6 +120,42 @@ spec: image: description: Specify a custom image for DSP API Server. type: string + initResources: + description: Specify init container resource requirements. The + init container is used to build managed-pipelines and store + them in a shared volume. 
+ properties: + limits: + properties: + cpu: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + memory: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + requests: + properties: + cpu: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + memory: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + type: object resources: description: Specify custom Pod resource requirements for this component. @@ -1018,6 +1054,42 @@ spec: image: description: Specify a custom image for DSP API Server. type: string + initResources: + description: Specify init container resource requirements. The + init container is used to build managed-pipelines and store + them in a shared volume. 
+ properties: + limits: + properties: + cpu: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + memory: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + requests: + properties: + cpu: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + memory: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + type: object injectDefaultScript: default: true description: 'Inject the archive step script. 
Default: true Deprecated: diff --git a/config/internal/apiserver/default/deployment.yaml.tmpl b/config/internal/apiserver/default/deployment.yaml.tmpl index a25496508..641f0e0f5 100644 --- a/config/internal/apiserver/default/deployment.yaml.tmpl +++ b/config/internal/apiserver/default/deployment.yaml.tmpl @@ -36,13 +36,25 @@ spec: value: {{.APIServer.ToolboxImage}} - name: RHELAI_IMAGE value: {{.APIServer.RHELAIImage}} + {{ if .APIServer.InitResources }} resources: - limits: - memory: 200Mi - cpu: '1' requests: - memory: 200Mi - cpu: '1' + {{ if .APIServer.InitResources.Requests.CPU }} + cpu: {{.APIServer.InitResources.Requests.CPU}} + {{ end }} + {{ if .APIServer.InitResources.Requests.Memory }} + memory: {{.APIServer.InitResources.Requests.Memory}} + {{ end }} + {{ end }} + {{ if .APIServer.InitResources.Limits }} + limits: + {{ if .APIServer.InitResources.Limits.CPU }} + cpu: {{.APIServer.InitResources.Limits.CPU}} + {{ end }} + {{ if .APIServer.InitResources.Limits.Memory }} + memory: {{.APIServer.InitResources.Limits.Memory}} + {{ end }} + {{ end }} volumeMounts: - mountPath: /opt/app-root/src/build name: managed-pipelines diff --git a/controllers/config/defaults.go b/controllers/config/defaults.go index a473d0b25..30c82fc0a 100644 --- a/controllers/config/defaults.go +++ b/controllers/config/defaults.go @@ -165,6 +165,7 @@ func GetConfigRequiredFields() []string { // Default ResourceRequirements var ( APIServerResourceRequirements = createResourceRequirement(resource.MustParse("250m"), resource.MustParse("500Mi"), resource.MustParse("500m"), resource.MustParse("1Gi")) + APIServerInitResourceRequirements = createResourceRequirement(resource.MustParse("250m"), resource.MustParse("128Mi"), resource.MustParse("500m"), resource.MustParse("256Mi")) PersistenceAgentResourceRequirements = createResourceRequirement(resource.MustParse("120m"), resource.MustParse("500Mi"), resource.MustParse("250m"), resource.MustParse("1Gi")) ScheduledWorkflowResourceRequirements = 
createResourceRequirement(resource.MustParse("120m"), resource.MustParse("100Mi"), resource.MustParse("250m"), resource.MustParse("250Mi")) WorkflowControllerResourceRequirements = createResourceRequirement(resource.MustParse("120m"), resource.MustParse("500Mi"), resource.MustParse("250m"), resource.MustParse("1Gi")) diff --git a/controllers/dspipeline_params.go b/controllers/dspipeline_params.go index 2a4f3943b..4a50b54d9 100644 --- a/controllers/dspipeline_params.go +++ b/controllers/dspipeline_params.go @@ -607,6 +607,7 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip setStringDefault(rhelAIImageFromConfig, &p.APIServer.RHELAIImage) setResourcesDefault(config.APIServerResourceRequirements, &p.APIServer.Resources) + setResourcesDefault(config.APIServerInitResourceRequirements, &p.APIServer.InitResources) if p.APIServer.CustomServerConfig == nil { p.APIServer.CustomServerConfig = &dspa.ScriptConfigMap{ diff --git a/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml index 8a571c56e..7d00fcfbb 100644 --- a/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml @@ -40,11 +40,11 @@ spec: value: rhelai:test0 resources: limits: - memory: 200Mi - cpu: '1' + memory: 256Mi + cpu: '500m' requests: - memory: 200Mi - cpu: '1' + memory: 128Mi + cpu: '250m' volumeMounts: - mountPath: /opt/app-root/src/build name: managed-pipelines diff --git a/controllers/testdata/declarative/case_2/deploy/cr.yaml b/controllers/testdata/declarative/case_2/deploy/cr.yaml index be4e900ea..d923e3f06 100644 --- a/controllers/testdata/declarative/case_2/deploy/cr.yaml +++ b/controllers/testdata/declarative/case_2/deploy/cr.yaml @@ -23,6 +23,13 @@ spec: limits: cpu: "2522m" memory: "5Gi" + initResources: + requests: + cpu: "1232m" + 
memory: "2Gi" + limits: + cpu: "2523m" + memory: "6Gi" persistenceAgent: deploy: true image: persistenceagent:test2 diff --git a/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml index a158666bd..9833180e2 100644 --- a/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml @@ -24,7 +24,7 @@ spec: spec: initContainers: - name: init-pipelines - image: runtimegeneric:test0 + image: runtimegeneric:test2 workingDir: /opt/app-root/src/pipelines/distributed-ilab command: ['/bin/sh', '-c'] args: @@ -40,11 +40,11 @@ spec: value: rhelai:test2 resources: limits: - memory: 200Mi - cpu: '1' + memory: 6Gi + cpu: '2523m' requests: - memory: 200Mi - cpu: '1' + memory: 2Gi + cpu: '1232m' volumeMounts: - mountPath: /opt/app-root/src/build name: managed-pipelines diff --git a/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml index 61af0ffb0..117944959 100644 --- a/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml @@ -24,7 +24,7 @@ spec: spec: initContainers: - name: init-pipelines - image: runtimegeneric:test0 + image: runtimegeneric:test3 workingDir: /opt/app-root/src/pipelines/distributed-ilab command: ['/bin/sh', '-c'] args: @@ -40,11 +40,11 @@ spec: value: rhelai:test3 resources: limits: - memory: 200Mi - cpu: '1' + memory: 256Mi + cpu: '500m' requests: - memory: 200Mi - cpu: '1' + memory: 128Mi + cpu: '250m' volumeMounts: - mountPath: /opt/app-root/src/build name: managed-pipelines diff --git a/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml 
b/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml index fe3b87d0f..4cb5df715 100644 --- a/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml @@ -24,7 +24,7 @@ spec: spec: initContainers: - name: init-pipelines - image: runtimegeneric:test0 + image: runtimegeneric:test4 workingDir: /opt/app-root/src/pipelines/distributed-ilab command: ['/bin/sh', '-c'] args: @@ -40,11 +40,11 @@ spec: value: rhelai:test4 resources: limits: - memory: 200Mi - cpu: '1' + memory: 256Mi + cpu: '500m' requests: - memory: 200Mi - cpu: '1' + memory: 128Mi + cpu: '250m' volumeMounts: - mountPath: /opt/app-root/src/build name: managed-pipelines diff --git a/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml index e4abd60b5..4f4f3c0b2 100644 --- a/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml @@ -24,7 +24,7 @@ spec: spec: initContainers: - name: init-pipelines - image: runtimegeneric:test0 + image: runtimegeneric:test5 workingDir: /opt/app-root/src/pipelines/distributed-ilab command: ['/bin/sh', '-c'] args: @@ -40,11 +40,11 @@ spec: value: rhelai:test5 resources: limits: - memory: 200Mi - cpu: '1' + memory: 256Mi + cpu: '500m' requests: - memory: 200Mi - cpu: '1' + memory: 128Mi + cpu: '250m' volumeMounts: - mountPath: /opt/app-root/src/build name: managed-pipelines diff --git a/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml index 37b446fc9..8367a9033 100644 --- a/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml +++ 
b/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml @@ -24,7 +24,7 @@ spec: spec: initContainers: - name: init-pipelines - image: runtimegeneric:test0 + image: runtimegeneric:test6 workingDir: /opt/app-root/src/pipelines/distributed-ilab command: ['/bin/sh', '-c'] args: @@ -40,11 +40,11 @@ spec: value: rhelai:test6 resources: limits: - memory: 200Mi - cpu: '1' + memory: 256Mi + cpu: '500m' requests: - memory: 200Mi - cpu: '1' + memory: 128Mi + cpu: '250m' volumeMounts: - mountPath: /opt/app-root/src/build name: managed-pipelines diff --git a/controllers/testutil/equalities.go b/controllers/testutil/equalities.go index bc0a9a557..c9dade649 100644 --- a/controllers/testutil/equalities.go +++ b/controllers/testutil/equalities.go @@ -168,18 +168,37 @@ func deploymentsAreEqual(expected, actual *unstructured.Unstructured) (bool, err return false, notDeeplyEqualMsg("Volumes", diff) } - if len(expectedDep.Spec.Template.Spec.Containers) != len(actualDep.Spec.Template.Spec.Containers) { + _, err = compareContainers(expectedDep.Spec.Template.Spec.InitContainers, actualDep.Spec.Template.Spec.InitContainers) + if err != nil { + return false, err + } + + _, err = compareContainers(expectedDep.Spec.Template.Spec.Containers, actualDep.Spec.Template.Spec.Containers) + if err != nil { + return false, err + } + + return true, nil +} + +func compareContainers(expectedContainers []v1.Container, actualContainers []v1.Container) (bool, error) { + + if len(expectedContainers) != len(actualContainers) { return false, notEqualMsg("Container lengths") } - for i := range expectedDep.Spec.Template.Spec.Containers { - expectedContainer := expectedDep.Spec.Template.Spec.Containers[i] - actualContainer := actualDep.Spec.Template.Spec.Containers[i] - diffEnvsMsg := compareEnvs(expectedContainer.Env, actualContainer.Env) + for i := range expectedContainers { + expectedContainer := expectedContainers[i] + actualContainer := actualContainers[i] + + diffEnvsMsg := 
compareEnvs(expectedContainer.Env, actualContainer.Env) if len(expectedContainer.Env) != len(actualContainer.Env) { return false, notEqualMsg(fmt.Sprintf("Container Env Lengths [expected: %d, actual: %d]\nDivergence(s): %s", len(expectedContainer.Env), len(actualContainer.Env), diffEnvsMsg)) } + + var diff []string + // Check each env individually for a more meaningful response upon failure. for i, expectedEnv := range expectedContainer.Env { actualEnv := actualContainer.Env[i] diff --git a/tests/resources/dspa-external-lite.yaml b/tests/resources/dspa-external-lite.yaml index 3a409e2d4..8a5033aa9 100644 --- a/tests/resources/dspa-external-lite.yaml +++ b/tests/resources/dspa-external-lite.yaml @@ -19,6 +19,13 @@ spec: requests: cpu: 20m memory: 100m + initResources: + limits: + cpu: 20m + memory: 200Mi + requests: + cpu: 20m + memory: 100Mi scheduledWorkflow: deploy: true resources: diff --git a/tests/resources/dspa-external.yaml b/tests/resources/dspa-external.yaml index 89c9db009..4289eb24e 100644 --- a/tests/resources/dspa-external.yaml +++ b/tests/resources/dspa-external.yaml @@ -12,6 +12,13 @@ spec: cABundle: configMapName: root-ca configMapKey: public.crt + initResources: + limits: + cpu: 20m + memory: 200Mi + requests: + cpu: 20m + memory: 100Mi scheduledWorkflow: deploy: true persistenceAgent: From 64c1d81f61cfd10f5cdbfb1f6222ada222441592 Mon Sep 17 00:00:00 2001 From: Daniel Dowler <12484302+dandawg@users.noreply.github.com> Date: Mon, 10 Feb 2025 11:14:04 -0700 Subject: [PATCH 04/14] glog upgraded to 1.2.4 fixes CVE GO-2025-3372 Signed-off-by: Daniel Dowler <12484302+dandawg@users.noreply.github.com> --- go.mod | 2 +- go.sum | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/go.mod b/go.mod index e1d68b42f..8053575b9 100644 --- a/go.mod +++ b/go.mod @@ -8,7 +8,7 @@ require ( github.com/go-logr/logr v1.2.4 github.com/go-sql-driver/mysql v1.7.1 github.com/go-test/deep v1.1.0 - github.com/golang/glog v1.1.0 + github.com/golang/glog v1.2.4 
github.com/manifestival/controller-runtime-client v0.4.0 github.com/manifestival/manifestival v0.7.2 github.com/minio/minio-go/v7 v7.0.56 diff --git a/go.sum b/go.sum index 3a41f2156..7698e4367 100644 --- a/go.sum +++ b/go.sum @@ -862,8 +862,9 @@ github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGw github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v0.0.0-20210429001901-424d2337a529/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= -github.com/golang/glog v1.1.0 h1:/d3pCKDPWNnvIWe0vVUpNP32qc8U3PDVxySP/y360qE= github.com/golang/glog v1.1.0/go.mod h1:pfYeQZ3JWZoXTV5sFc986z3HTpwQs9At6P4ImfuP3NQ= +github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc= +github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= From 11a9a7cce9983aa3a7cff4131197a9ab713e390c Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Tue, 14 Jan 2025 16:31:35 -0500 Subject: [PATCH 05/14] Dynamically Generate Sample Config - Allows for configuration of per-DSPA Managed Pipelines Signed-off-by: Giulio Frasca --- api/v1/dspipeline_types.go | 13 ++ api/v1/zz_generated.deepcopy.go | 20 +++ api/v1alpha1/dspipeline_types.go | 13 ++ api/v1alpha1/zz_generated.deepcopy.go | 20 +++ config/base/kustomization.yaml | 14 ++ config/base/params.env | 2 + config/configmaps/files/config.yaml | 15 ++ ...b.io_datasciencepipelinesapplications.yaml | 30 ++++ .../sample-pipeline/sample-config.yaml.tmpl | 8 +- 
config/manager/manager.yaml | 4 + controllers/apiserver.go | 130 ++++++++++++++++++ controllers/config/defaults.go | 12 ++ controllers/dspipeline_controller.go | 9 ++ controllers/dspipeline_params.go | 19 ++- 14 files changed, 301 insertions(+), 8 deletions(-) diff --git a/api/v1/dspipeline_types.go b/api/v1/dspipeline_types.go index 47536617f..99b3a8d79 100644 --- a/api/v1/dspipeline_types.go +++ b/api/v1/dspipeline_types.go @@ -55,6 +55,17 @@ type DSPASpec struct { *WorkflowController `json:"workflowController,omitempty"` } +type ManagedPipelines struct { + // Include instructlab multi-phase training pipelines with the deployment of this DSP API Server. Default: true + // Applicable values:"Managed" or "Removed" + // +kubebuilder:validation:Optional + EnableInstructLabPipeline string `json:"enableInstructLabPipeline,omitempty"` + // Include sample pipelines with the deployment of this DSP API Server. Default: true + // Applicable values:"Managed" or "Removed" + // +kubebuilder:validation:Optional + EnableIrisPipeline string `json:"enableIrisPipeline,omitempty"` +} + type APIServer struct { // Enable DS Pipelines Operator management of DSP API Server. Setting Deploy to false disables operator reconciliation. Default: true // +kubebuilder:default:=true @@ -82,6 +93,8 @@ type APIServer struct { ToolboxImage string `json:"toolboxImage,omitempty"` // RhelAI image used for ilab tasks in managed pipelines. RHELAIImage string `json:"rhelAIImage,omitempty"` + // Enable various pipelines with the deployment of this DSP API server. + ManagedPipelines *ManagedPipelines `json:"managedPipelines,omitempty"` // Specify custom Pod resource requirements for this component. Resources *ResourceRequirements `json:"resources,omitempty"` // Specify init container resource requirements. 
The init container diff --git a/api/v1/zz_generated.deepcopy.go b/api/v1/zz_generated.deepcopy.go index ce72ec3c2..7137f7e31 100644 --- a/api/v1/zz_generated.deepcopy.go +++ b/api/v1/zz_generated.deepcopy.go @@ -29,6 +29,11 @@ import ( // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *APIServer) DeepCopyInto(out *APIServer) { *out = *in + if in.ManagedPipelines != nil { + in, out := &in.ManagedPipelines, &out.ManagedPipelines + *out = new(ManagedPipelines) + **out = **in + } if in.Resources != nil { in, out := &in.Resources, &out.Resources *out = new(ResourceRequirements) @@ -395,6 +400,21 @@ func (in *MLMD) DeepCopy() *MLMD { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ManagedPipelines) DeepCopyInto(out *ManagedPipelines) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedPipelines. +func (in *ManagedPipelines) DeepCopy() *ManagedPipelines { + if in == nil { + return nil + } + out := new(ManagedPipelines) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *MariaDB) DeepCopyInto(out *MariaDB) { *out = *in diff --git a/api/v1alpha1/dspipeline_types.go b/api/v1alpha1/dspipeline_types.go index 00035a617..41be2c45d 100644 --- a/api/v1alpha1/dspipeline_types.go +++ b/api/v1alpha1/dspipeline_types.go @@ -55,6 +55,17 @@ type DSPASpec struct { *WorkflowController `json:"workflowController,omitempty"` } +type ManagedPipelines struct { + // Include instructlab multi-phase training pipelines with the deployment of this DSP API Server. 
Default: true + // Applicable values:"Managed" or "Removed" + // +kubebuilder:validation:Optional + EnableInstructLabPipeline string `json:"enableInstructLabPipeline,omitempty"` + // Include sample pipelines with the deployment of this DSP API Server. Default: true + // Applicable values:"Managed" or "Removed" + // +kubebuilder:validation:Optional + EnableIrisPipeline string `json:"enableIrisPipeline,omitempty"` +} + type APIServer struct { // Enable DS Pipelines Operator management of DSP API Server. Setting Deploy to false disables operator reconciliation. Default: true // +kubebuilder:default:=true @@ -82,6 +93,8 @@ type APIServer struct { ToolboxImage string `json:"toolboxImage,omitempty"` // RhelAI image used for ilab tasks in managed pipelines. RHELAIImage string `json:"rhelAIImage,omitempty"` + // Enable various pipelines with the deployment of this DSP API server. + ManagedPipelines *ManagedPipelines `json:"managedPipelines,omitempty"` // Specify custom Pod resource requirements for this component. Resources *ResourceRequirements `json:"resources,omitempty"` // Specify init container resource requirements. The init container diff --git a/api/v1alpha1/zz_generated.deepcopy.go b/api/v1alpha1/zz_generated.deepcopy.go index 664b56a23..d48cc0c98 100644 --- a/api/v1alpha1/zz_generated.deepcopy.go +++ b/api/v1alpha1/zz_generated.deepcopy.go @@ -29,6 +29,11 @@ import ( // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *APIServer) DeepCopyInto(out *APIServer) { *out = *in + if in.ManagedPipelines != nil { + in, out := &in.ManagedPipelines, &out.ManagedPipelines + *out = new(ManagedPipelines) + **out = **in + } if in.Resources != nil { in, out := &in.Resources, &out.Resources *out = new(ResourceRequirements) @@ -405,6 +410,21 @@ func (in *MLMD) DeepCopy() *MLMD { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ManagedPipelines) DeepCopyInto(out *ManagedPipelines) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedPipelines. +func (in *ManagedPipelines) DeepCopy() *ManagedPipelines { + if in == nil { + return nil + } + out := new(ManagedPipelines) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *MariaDB) DeepCopyInto(out *MariaDB) { *out = *in diff --git a/config/base/kustomization.yaml b/config/base/kustomization.yaml index 2728a140f..0ec304a6a 100644 --- a/config/base/kustomization.yaml +++ b/config/base/kustomization.yaml @@ -167,5 +167,19 @@ vars: apiVersion: v1 fieldref: fieldpath: data.DSPO_APISERVER_INCLUDE_OWNERREFERENCE + - name: MANAGEDPIPELINES + objref: + kind: ConfigMap + name: dspo-parameters + apiVersion: v1 + fieldref: + fieldpath: data.MANAGEDPIPELINES + - name: PLATFORMVERSION + objref: + kind: ConfigMap + name: dspo-parameters + apiVersion: v1 + fieldref: + fieldpath: data.PLATFORMVERSION configurations: - params.yaml diff --git a/config/base/params.env b/config/base/params.env index f7475f249..619ec513a 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -19,3 +19,5 @@ DSPO_HEALTHCHECK_DATABASE_CONNECTIONTIMEOUT=15s DSPO_HEALTHCHECK_OBJECTSTORE_CONNECTIONTIMEOUT=15s DSPO_REQUEUE_TIME=20s DSPO_APISERVER_INCLUDE_OWNERREFERENCE=true +MANAGEDPIPELINES="{}" +PLATFORMVERSION="v0.0.0" diff --git a/config/configmaps/files/config.yaml b/config/configmaps/files/config.yaml index b334582d8..7640a5147 100644 --- a/config/configmaps/files/config.yaml +++ b/config/configmaps/files/config.yaml @@ -13,6 +13,20 @@ Images: RuntimeGeneric: $(IMAGES_PIPELINESRUNTIMEGENERIC) Toolbox: $(IMAGES_TOOLBOX) RHELAI: $(IMAGES_RHELAI) +Samples: + Iris: /pipelines/iris-compiled.yaml + Instructlab: /pipelines/instructlab.yaml +ManagedPipelinesMetadata: + Instructlab: + Name: 
"[InstructLab] LLM Training Pipeline" + Description: + Filepath: /pipelines/instructlab.yaml + VersionName: "[InstructLab] LLM Training Pipeline - $(PLATFORMVERSION)" + Iris: + Name: "[Demo] iris-training" + Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" + Filepath: /samples/iris-pipeline-compiled.yaml + VersionName: "[Demo] iris-training - $(PLATFORMVERSION)" DSPO: HealthCheck: Database: @@ -20,3 +34,4 @@ DSPO: ObjectStore: ConnectionTimeout: $(DSPO_HEALTHCHECK_OBJECTSTORE_CONNECTIONTIMEOUT) RequeueTime: $(DSPO_REQUEUE_TIME) + PlatformVersion: $(PLATFORMVERSION) diff --git a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml index c4802251f..58c15a7bd 100644 --- a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml +++ b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml @@ -156,6 +156,21 @@ spec: x-kubernetes-int-or-string: true type: object type: object + managedPipelines: + description: Enable various pipelines with the deployment of this + DSP API server. + properties: + enableInstructLabPipeline: + description: 'Include instructlab multi-phase training pipelines + with the deployment of this DSP API Server. Default: true + Applicable values:"Managed" or "Removed"' + type: string + enableIrisPipeline: + description: 'Include sample pipelines with the deployment + of this DSP API Server. Default: true Applicable values:"Managed" + or "Removed"' + type: string + type: object resources: description: Specify custom Pod resource requirements for this component. @@ -1095,6 +1110,21 @@ spec: description: 'Inject the archive step script. 
Default: true Deprecated: DSP V1 only, will be removed in the future.' type: boolean + managedPipelines: + description: Enable various pipelines with the deployment of this + DSP API server. + properties: + enableInstructLabPipeline: + description: 'Include instructlab multi-phase training pipelines + with the deployment of this DSP API Server. Default: true + Applicable values:"Managed" or "Removed"' + type: string + enableIrisPipeline: + description: 'Include sample pipelines with the deployment + of this DSP API Server. Default: true Applicable values:"Managed" + or "Removed"' + type: string + type: object moveResultsImage: description: 'Image used for internal artifact passing handling within Tekton taskruns. This field specifies the image used diff --git a/config/internal/apiserver/sample-pipeline/sample-config.yaml.tmpl b/config/internal/apiserver/sample-pipeline/sample-config.yaml.tmpl index 76e64d65d..7951b25d8 100644 --- a/config/internal/apiserver/sample-pipeline/sample-config.yaml.tmpl +++ b/config/internal/apiserver/sample-pipeline/sample-config.yaml.tmpl @@ -8,10 +8,4 @@ metadata: component: data-science-pipelines data: sample_config.json: |- - [ - { - "name": "[Demo] iris-training", - "description": "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow", - "file": "/samples/iris-pipeline-compiled.yaml" - } - ] + {{ .SampleConfigJSON }} diff --git a/config/manager/manager.yaml b/config/manager/manager.yaml index 281407679..28c9ca661 100644 --- a/config/manager/manager.yaml +++ b/config/manager/manager.yaml @@ -79,6 +79,10 @@ spec: # It must always be enabled in production - name: DSPO_APISERVER_INCLUDE_OWNERREFERENCE value: $(DSPO_APISERVER_INCLUDE_OWNERREFERENCE) + - name: MANAGEDPIPELINES + value: $(MANAGEDPIPELINES) + - name: PLATFORMVERSION + value: $(PLATFORMVERSION) securityContext: allowPrivilegeEscalation: false capabilities: diff 
--git a/controllers/apiserver.go b/controllers/apiserver.go index 929ede232..16e8ea12b 100644 --- a/controllers/apiserver.go +++ b/controllers/apiserver.go @@ -17,8 +17,13 @@ package controllers import ( "context" + "encoding/json" + "fmt" + "strings" + dspa "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" + "github.com/opendatahub-io/data-science-pipelines-operator/controllers/config" v1 "github.com/openshift/api/route/v1" corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/types" @@ -32,6 +37,22 @@ const apiServerDefaultResourceNamePrefix = "ds-pipeline-" // as such it is handled separately const serverRoute = "apiserver/route/route.yaml.tmpl" +const FullSampleConfigJSON = "[{\"name\": \"Instructlab FOOOZ\", \"description\": \"Instructlab\", \"file\": \"/pipelines/instructlab.yaml\"}]" + +// [ +// { +// "name": "[Demo] iris-training", +// "description": "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow", +// "file": "/samples/iris-pipeline-compiled.yaml" +// }, +// { +// "name": "Instructlab FOOOOOOO", +// "description": "Instructlab", +// "file": "/pipelines/instructlab.yaml" +// } +// ] +// ` + // Sample Pipeline and Config are resources deployed conditionally // as such it is handled separately var samplePipelineTemplates = map[string]string{ @@ -39,6 +60,114 @@ var samplePipelineTemplates = map[string]string{ "sample-config": "apiserver/sample-pipeline/sample-config.yaml.tmpl", } +func (r *DSPAReconciler) GenerateSamplePipelineMetadataBlock(pipeline string) (map[string]string, error) { + + item := make(map[string]string) + + // Get Required Fields + pName, err := config.GetStringConfigOrDie(fmt.Sprintf("ManagedPipelinesMetadata.%s.Name", pipeline)) + if err != nil { + return nil, err + } + pFile, err := 
config.GetStringConfigOrDie(fmt.Sprintf("ManagedPipelinesMetadata.%s.Filepath", pipeline)) + if err != nil { + return nil, err + } + + // Get optional fields + pDesc := config.GetStringConfigWithDefault(fmt.Sprintf("ManagedPipelinesMetadata.%s.Description", pipeline), "") + + // Create Sample Config item + item["name"] = pName + item["file"] = pFile + item["description"] = pDesc + + return item, nil + +} + +func (r *DSPAReconciler) GetSampleConfig(ctx context.Context, dsp *dspa.DataSciencePipelinesApplication, params *DSPAParams) (string, error) { + // TODO(gfrasca): do this more systematically and/or extendably + enableInstructLabPipeline, err := r.IsPipelineEnabledByPlatform("instructlab") + if err != nil { + return "", err + } + enableIrisPipeline, err := r.IsPipelineEnabledByPlatform("iris") + if err != nil { + return "", err + } + + // Check if InstructLab Pipeline enabled in this DSPA + if dsp.Spec.APIServer.ManagedPipelines != nil { + settingInDSPA := dsp.Spec.APIServer.ManagedPipelines.EnableInstructLabPipeline + if strings.EqualFold(settingInDSPA, "Managed") { + enableInstructLabPipeline = true + } else if strings.EqualFold(settingInDSPA, "Removed") { + enableInstructLabPipeline = false + } + } + + // Check if Iris Pipeline enabled in this DSPA + // Legacy support case + if dsp.Spec.APIServer.EnableSamplePipeline { + enableIrisPipeline = true + } else if dsp.Spec.APIServer.ManagedPipelines != nil { + settingInDSPA := dsp.Spec.APIServer.ManagedPipelines.EnableIrisPipeline + if strings.EqualFold(settingInDSPA, "Managed") { + enableIrisPipeline = true + } else if strings.EqualFold(settingInDSPA, "Removed") { + enableIrisPipeline = false + } + } + + return r.GenerateSampleConfigJSON(enableInstructLabPipeline, enableIrisPipeline) +} + +func (r *DSPAReconciler) IsPipelineEnabledByPlatform(pipelineName string) (bool, error) { + var platformManagedPipelines map[string]map[string]string + platformPipelinesJSON := 
config.GetStringConfigWithDefault("ManagedPipelines", config.DefaultManagedPipelines) + + err := json.Unmarshal([]byte(platformPipelinesJSON), &platformManagedPipelines) + if err != nil { + return false, err + } + + for name, val := range platformManagedPipelines { + if strings.EqualFold(name, pipelineName) { + return strings.EqualFold(val["state"], "Managed"), nil + } + } + return false, nil +} + +func (r *DSPAReconciler) GenerateSampleConfigJSON(enableInstructLabPipeline, enableIrisPipeline bool) (string, error) { + + // Now generate a sample config + var sampleConfig = make([]map[string]string, 0) + if enableInstructLabPipeline { + item, err := r.GenerateSamplePipelineMetadataBlock("instructlab") + if err != nil { + return "", err + } + sampleConfig = append(sampleConfig, item) + } + if enableIrisPipeline { + item, err := r.GenerateSamplePipelineMetadataBlock("iris") + if err != nil { + return "", err + } + sampleConfig = append(sampleConfig, item) + } + + // Marshal into a JSON String + outputJSON, err := json.Marshal(sampleConfig) + if err != nil { + return "", err + } + + return string(outputJSON), nil +} + func (r *DSPAReconciler) ReconcileAPIServer(ctx context.Context, dsp *dspav1.DataSciencePipelinesApplication, params *DSPAParams) error { log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) @@ -68,6 +197,7 @@ func (r *DSPAReconciler) ReconcileAPIServer(ctx context.Context, dsp *dspav1.Dat } for cmName, template := range samplePipelineTemplates { + //if dsp.Spec.APIServer.EnableSamplePipeline || dsp.Spec.APIServer.ManagedPipelines.EnableIrisPipeline || dsp.Spec.APIServer.ManagedPipelines.EnableInstructLabPipeline { if dsp.Spec.APIServer.EnableSamplePipeline { err := r.Apply(dsp, params, template) if err != nil { diff --git a/controllers/config/defaults.go b/controllers/config/defaults.go index 30c82fc0a..06788ff4c 100644 --- a/controllers/config/defaults.go +++ b/controllers/config/defaults.go @@ -158,6 +158,10 @@ const 
DefaultRequeueTime = time.Second * 20 const DefaultApiServerIncludeOwnerReferenceConfigName = true +const DefaultManagedPipelines = "{}" + +const DefaultPlatformVersion = "v0.0.0" + func GetConfigRequiredFields() []string { return requiredFields } @@ -191,6 +195,14 @@ func createResourceRequirement(RequestsCPU resource.Quantity, RequestsMemory res } } +func GetStringConfigOrDie(configName string) (string, error) { + if !viper.IsSet(configName) { + return "", fmt.Errorf("value not set in config for configname %s", configName) + } + + return viper.GetString(configName), nil +} + func GetStringConfigWithDefault(configName, value string) string { if !viper.IsSet(configName) { return value diff --git a/controllers/dspipeline_controller.go b/controllers/dspipeline_controller.go index 8f29d8da9..f0aba231e 100644 --- a/controllers/dspipeline_controller.go +++ b/controllers/dspipeline_controller.go @@ -19,6 +19,7 @@ package controllers import ( "context" "fmt" + "github.com/opendatahub-io/data-science-pipelines-operator/controllers/dspastatus" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "sigs.k8s.io/controller-runtime/pkg/controller" @@ -244,12 +245,20 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. 
} requeueTime := config.GetDurationConfigWithDefault(config.RequeueTimeConfigName, config.DefaultRequeueTime) + err = params.ExtractParams(ctx, dspa, r.Client, r.Log) if err != nil { log.Info(fmt.Sprintf("Encountered error when parsing CR: [%s]", err)) return ctrl.Result{Requeue: true, RequeueAfter: requeueTime}, nil } + sampleConfigJSON, err := r.GetSampleConfig(ctx, dspa, params) + if err != nil { + log.Info(fmt.Sprintf("Encountered error while generating sample config: [%s]", err)) + return ctrl.Result{Requeue: true, RequeueAfter: requeueTime}, nil + } + params.SampleConfigJSON = sampleConfigJSON + err = r.ReconcileDatabase(ctx, dspa, params) if err != nil { dspaStatus.SetDatabaseNotReady(err, config.FailingToDeploy) diff --git a/controllers/dspipeline_params.go b/controllers/dspipeline_params.go index 4a50b54d9..21c26da83 100644 --- a/controllers/dspipeline_params.go +++ b/controllers/dspipeline_params.go @@ -58,6 +58,7 @@ type DSPAParams struct { APIServerDefaultResourceName string APIServerServiceName string OAuthProxy string + SampleConfigJSON string ScheduledWorkflow *dspa.ScheduledWorkflow ScheduledWorkflowDefaultResourceName string PersistenceAgent *dspa.PersistenceAgent @@ -557,6 +558,15 @@ func (p *DSPAParams) LoadMlmdCertificates(ctx context.Context, client client.Cli return true, nil } +func (p *DSPAParams) GenerateSampleConfig() (string, error) { + sampleConfigJSON := &bytes.Buffer{} + if err := json.Compact(sampleConfigJSON, []byte(FullSampleConfigJSON)); err != nil { + return "", err + } + return sampleConfigJSON.String(), nil + +} + func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePipelinesApplication, client client.Client, loggr logr.Logger) error { p.Name = dsp.Name p.Namespace = dsp.Namespace @@ -591,6 +601,13 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip log := loggr.WithValues("namespace", p.Namespace).WithValues("dspa_name", p.Name) + sampleConfigJSON, err := 
p.GenerateSampleConfig() + if err != nil { + log.Info(fmt.Sprintf("Error generating samples configuration JSON, Error: %v", err)) + return err + } + p.SampleConfigJSON = sampleConfigJSON + if p.APIServer != nil { serverImageFromConfig := config.GetStringConfigWithDefault(config.APIServerImagePath, config.DefaultImageValue) argoLauncherImageFromConfig := config.GetStringConfigWithDefault(config.LauncherImagePath, config.DefaultImageValue) @@ -832,7 +849,7 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip setResourcesDefault(config.WorkflowControllerResourceRequirements, &p.WorkflowController.Resources) } - err := p.SetupMLMD(dsp, log) + err = p.SetupMLMD(dsp, log) if err != nil { return err } From d24b710b53f2ea2b172e92b1eed5a49ceb817fd4 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Tue, 11 Feb 2025 16:59:35 -0500 Subject: [PATCH 06/14] Remove Platform-scope enablement, Retreive VersionName from Config Signed-off-by: Giulio Frasca --- api/v1/dspipeline_types.go | 19 ++--- api/v1/zz_generated.deepcopy.go | 32 +++++++-- config/configmaps/files/config.yaml | 2 + ...b.io_datasciencepipelinesapplications.yaml | 24 +++---- controllers/apiserver.go | 72 +++++++------------ controllers/dspipeline_params.go | 18 +---- 6 files changed, 77 insertions(+), 90 deletions(-) diff --git a/api/v1/dspipeline_types.go b/api/v1/dspipeline_types.go index 99b3a8d79..7794092a4 100644 --- a/api/v1/dspipeline_types.go +++ b/api/v1/dspipeline_types.go @@ -55,15 +55,17 @@ type DSPASpec struct { *WorkflowController `json:"workflowController,omitempty"` } -type ManagedPipelines struct { - // Include instructlab multi-phase training pipelines with the deployment of this DSP API Server. Default: true +type ManagedPipelineOptions struct { + // Include managed pipelines with the deployment of this DSP API Server. 
Default: nil // Applicable values:"Managed" or "Removed" // +kubebuilder:validation:Optional - EnableInstructLabPipeline string `json:"enableInstructLabPipeline,omitempty"` - // Include sample pipelines with the deployment of this DSP API Server. Default: true - // Applicable values:"Managed" or "Removed" + State string `json:"state,omitempty"` +} + +type ManagedPipelinesSpec struct { + // Include instructlab multi-phase training pipelines with the deployment of this DSP API Server. // +kubebuilder:validation:Optional - EnableIrisPipeline string `json:"enableIrisPipeline,omitempty"` + InstructLab *ManagedPipelineOptions `json:"instructLab,omitempty"` } type APIServer struct { @@ -77,8 +79,9 @@ type APIServer struct { // +kubebuilder:default:=true // +kubebuilder:validation:Optional EnableRoute bool `json:"enableOauth"` - // Include sample pipelines with the deployment of this DSP API Server. Default: true + // Include Iris sample pipeline with the deployment of this DSP API Server. Default: true // +kubebuilder:default:=false + // +Deprecated // +kubebuilder:validation:Optional EnableSamplePipeline bool `json:"enableSamplePipeline"` // Launcher/Executor image used during pipeline execution. @@ -94,7 +97,7 @@ type APIServer struct { // RhelAI image used for ilab tasks in managed pipelines. RHELAIImage string `json:"rhelAIImage,omitempty"` // Enable various pipelines with the deployment of this DSP API server. - ManagedPipelines *ManagedPipelines `json:"managedPipelines,omitempty"` + ManagedPipelines *ManagedPipelinesSpec `json:"managedPipelines,omitempty"` // Specify custom Pod resource requirements for this component. Resources *ResourceRequirements `json:"resources,omitempty"` // Specify init container resource requirements. 
The init container diff --git a/api/v1/zz_generated.deepcopy.go b/api/v1/zz_generated.deepcopy.go index 7137f7e31..ea08e97d6 100644 --- a/api/v1/zz_generated.deepcopy.go +++ b/api/v1/zz_generated.deepcopy.go @@ -31,8 +31,8 @@ func (in *APIServer) DeepCopyInto(out *APIServer) { *out = *in if in.ManagedPipelines != nil { in, out := &in.ManagedPipelines, &out.ManagedPipelines - *out = new(ManagedPipelines) - **out = **in + *out = new(ManagedPipelinesSpec) + (*in).DeepCopyInto(*out) } if in.Resources != nil { in, out := &in.Resources, &out.Resources @@ -401,16 +401,36 @@ func (in *MLMD) DeepCopy() *MLMD { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ManagedPipelines) DeepCopyInto(out *ManagedPipelines) { +func (in *ManagedPipelineOptions) DeepCopyInto(out *ManagedPipelineOptions) { *out = *in } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedPipelines. -func (in *ManagedPipelines) DeepCopy() *ManagedPipelines { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedPipelineOptions. +func (in *ManagedPipelineOptions) DeepCopy() *ManagedPipelineOptions { + if in == nil { + return nil + } + out := new(ManagedPipelineOptions) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ManagedPipelinesSpec) DeepCopyInto(out *ManagedPipelinesSpec) { + *out = *in + if in.InstructLab != nil { + in, out := &in.InstructLab, &out.InstructLab + *out = new(ManagedPipelineOptions) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedPipelinesSpec. 
+func (in *ManagedPipelinesSpec) DeepCopy() *ManagedPipelinesSpec { if in == nil { return nil } - out := new(ManagedPipelines) + out := new(ManagedPipelinesSpec) in.DeepCopyInto(out) return out } diff --git a/config/configmaps/files/config.yaml b/config/configmaps/files/config.yaml index 7640a5147..0fb6a2914 100644 --- a/config/configmaps/files/config.yaml +++ b/config/configmaps/files/config.yaml @@ -22,11 +22,13 @@ ManagedPipelinesMetadata: Description: Filepath: /pipelines/instructlab.yaml VersionName: "[InstructLab] LLM Training Pipeline - $(PLATFORMVERSION)" + # VersionDescription: "TODO - Add a Version Description" Iris: Name: "[Demo] iris-training" Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" Filepath: /samples/iris-pipeline-compiled.yaml VersionName: "[Demo] iris-training - $(PLATFORMVERSION)" + # VersionDescription: "TODO - Add a Version Description" DSPO: HealthCheck: Database: diff --git a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml index 58c15a7bd..aede90de8 100644 --- a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml +++ b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml @@ -114,8 +114,8 @@ spec: type: boolean enableSamplePipeline: default: false - description: 'Include sample pipelines with the deployment of - this DSP API Server. Default: true' + description: 'Include Iris sample pipeline with the deployment + of this DSP API Server. Default: true' type: boolean image: description: Specify a custom image for DSP API Server. @@ -160,16 +160,16 @@ spec: description: Enable various pipelines with the deployment of this DSP API server. 
properties: - enableInstructLabPipeline: - description: 'Include instructlab multi-phase training pipelines - with the deployment of this DSP API Server. Default: true - Applicable values:"Managed" or "Removed"' - type: string - enableIrisPipeline: - description: 'Include sample pipelines with the deployment - of this DSP API Server. Default: true Applicable values:"Managed" - or "Removed"' - type: string + instructLab: + description: Include instructlab multi-phase training pipelines + with the deployment of this DSP API Server. + properties: + state: + description: 'Include managed pipelines with the deployment + of this DSP API Server. Default: nil Applicable values:"Managed" + or "Removed"' + type: string + type: object type: object resources: description: Specify custom Pod resource requirements for this diff --git a/controllers/apiserver.go b/controllers/apiserver.go index 16e8ea12b..61a6c6965 100644 --- a/controllers/apiserver.go +++ b/controllers/apiserver.go @@ -37,22 +37,6 @@ const apiServerDefaultResourceNamePrefix = "ds-pipeline-" // as such it is handled separately const serverRoute = "apiserver/route/route.yaml.tmpl" -const FullSampleConfigJSON = "[{\"name\": \"Instructlab FOOOZ\", \"description\": \"Instructlab\", \"file\": \"/pipelines/instructlab.yaml\"}]" - -// [ -// { -// "name": "[Demo] iris-training", -// "description": "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow", -// "file": "/samples/iris-pipeline-compiled.yaml" -// }, -// { -// "name": "Instructlab FOOOOOOO", -// "description": "Instructlab", -// "file": "/pipelines/instructlab.yaml" -// } -// ] -// ` - // Sample Pipeline and Config are resources deployed conditionally // as such it is handled separately var samplePipelineTemplates = map[string]string{ @@ -76,11 +60,15 @@ func (r *DSPAReconciler) GenerateSamplePipelineMetadataBlock(pipeline string) (m // Get optional 
fields pDesc := config.GetStringConfigWithDefault(fmt.Sprintf("ManagedPipelinesMetadata.%s.Description", pipeline), "") + pVerName := config.GetStringConfigWithDefault(fmt.Sprintf("ManagedPipelinesMetadata.%s.VersionName", pipeline), "") + pVerDesc := config.GetStringConfigWithDefault(fmt.Sprintf("ManagedPipelinesMetadata.%s.VersionDescription", pipeline), "") // Create Sample Config item item["name"] = pName item["file"] = pFile item["description"] = pDesc + item["versionName"] = pVerName + item["versionDescription"] = pVerDesc return item, nil @@ -88,39 +76,25 @@ func (r *DSPAReconciler) GenerateSamplePipelineMetadataBlock(pipeline string) (m func (r *DSPAReconciler) GetSampleConfig(ctx context.Context, dsp *dspa.DataSciencePipelinesApplication, params *DSPAParams) (string, error) { // TODO(gfrasca): do this more systematically and/or extendably - enableInstructLabPipeline, err := r.IsPipelineEnabledByPlatform("instructlab") - if err != nil { - return "", err - } - enableIrisPipeline, err := r.IsPipelineEnabledByPlatform("iris") - if err != nil { - return "", err - } + // enableInstructLabPipeline, err := r.IsPipelineEnabledByPlatform("instructlab") + // if err != nil { + // return "", err + // } + // enableIrisPipeline, err := r.IsPipelineEnabledByPlatform("iris") + // if err != nil { + // return "", err + // } // Check if InstructLab Pipeline enabled in this DSPA - if dsp.Spec.APIServer.ManagedPipelines != nil { - settingInDSPA := dsp.Spec.APIServer.ManagedPipelines.EnableInstructLabPipeline - if strings.EqualFold(settingInDSPA, "Managed") { - enableInstructLabPipeline = true - } else if strings.EqualFold(settingInDSPA, "Removed") { - enableInstructLabPipeline = false + enableInstructLabPipeline := false + if dsp.Spec.APIServer.ManagedPipelines != nil && dsp.Spec.APIServer.ManagedPipelines.InstructLab != nil { + settingInDSPA := dsp.Spec.APIServer.ManagedPipelines.InstructLab.State + if settingInDSPA != "" { + enableInstructLabPipeline = 
strings.EqualFold(settingInDSPA, "Managed") } } - // Check if Iris Pipeline enabled in this DSPA - // Legacy support case - if dsp.Spec.APIServer.EnableSamplePipeline { - enableIrisPipeline = true - } else if dsp.Spec.APIServer.ManagedPipelines != nil { - settingInDSPA := dsp.Spec.APIServer.ManagedPipelines.EnableIrisPipeline - if strings.EqualFold(settingInDSPA, "Managed") { - enableIrisPipeline = true - } else if strings.EqualFold(settingInDSPA, "Removed") { - enableIrisPipeline = false - } - } - - return r.GenerateSampleConfigJSON(enableInstructLabPipeline, enableIrisPipeline) + return r.GenerateSampleConfigJSON(enableInstructLabPipeline, dsp.Spec.APIServer.EnableSamplePipeline) } func (r *DSPAReconciler) IsPipelineEnabledByPlatform(pipelineName string) (bool, error) { @@ -143,22 +117,26 @@ func (r *DSPAReconciler) IsPipelineEnabledByPlatform(pipelineName string) (bool, func (r *DSPAReconciler) GenerateSampleConfigJSON(enableInstructLabPipeline, enableIrisPipeline bool) (string, error) { // Now generate a sample config - var sampleConfig = make([]map[string]string, 0) + var pipelineConfig = make([]map[string]string, 0) if enableInstructLabPipeline { item, err := r.GenerateSamplePipelineMetadataBlock("instructlab") if err != nil { return "", err } - sampleConfig = append(sampleConfig, item) + pipelineConfig = append(pipelineConfig, item) } if enableIrisPipeline { item, err := r.GenerateSamplePipelineMetadataBlock("iris") if err != nil { return "", err } - sampleConfig = append(sampleConfig, item) + pipelineConfig = append(pipelineConfig, item) } + var sampleConfig = make(map[string]interface{}) + sampleConfig["pipelines"] = pipelineConfig + sampleConfig["loadSamplesOnRestart"] = true + // Marshal into a JSON String outputJSON, err := json.Marshal(sampleConfig) if err != nil { diff --git a/controllers/dspipeline_params.go b/controllers/dspipeline_params.go index 21c26da83..1e08e4c53 100644 --- a/controllers/dspipeline_params.go +++ 
b/controllers/dspipeline_params.go @@ -558,15 +558,6 @@ func (p *DSPAParams) LoadMlmdCertificates(ctx context.Context, client client.Cli return true, nil } -func (p *DSPAParams) GenerateSampleConfig() (string, error) { - sampleConfigJSON := &bytes.Buffer{} - if err := json.Compact(sampleConfigJSON, []byte(FullSampleConfigJSON)); err != nil { - return "", err - } - return sampleConfigJSON.String(), nil - -} - func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePipelinesApplication, client client.Client, loggr logr.Logger) error { p.Name = dsp.Name p.Namespace = dsp.Namespace @@ -601,13 +592,6 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip log := loggr.WithValues("namespace", p.Namespace).WithValues("dspa_name", p.Name) - sampleConfigJSON, err := p.GenerateSampleConfig() - if err != nil { - log.Info(fmt.Sprintf("Error generating samples configuration JSON, Error: %v", err)) - return err - } - p.SampleConfigJSON = sampleConfigJSON - if p.APIServer != nil { serverImageFromConfig := config.GetStringConfigWithDefault(config.APIServerImagePath, config.DefaultImageValue) argoLauncherImageFromConfig := config.GetStringConfigWithDefault(config.LauncherImagePath, config.DefaultImageValue) @@ -849,7 +833,7 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip setResourcesDefault(config.WorkflowControllerResourceRequirements, &p.WorkflowController.Resources) } - err = p.SetupMLMD(dsp, log) + err := p.SetupMLMD(dsp, log) if err != nil { return err } From aed281a3f27032da245dddccee4a54a71c5895c0 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Tue, 11 Feb 2025 17:05:22 -0500 Subject: [PATCH 07/14] Always deploy sample config - enableSampleConfig=false now updates sample config json directly Signed-off-by: Giulio Frasca --- .../apiserver/default/deployment.yaml.tmpl | 9 +-------- controllers/apiserver.go | 19 ++++--------------- 2 files changed, 5 insertions(+), 23 deletions(-) diff --git 
a/config/internal/apiserver/default/deployment.yaml.tmpl b/config/internal/apiserver/default/deployment.yaml.tmpl index 641f0e0f5..230b43ce6 100644 --- a/config/internal/apiserver/default/deployment.yaml.tmpl +++ b/config/internal/apiserver/default/deployment.yaml.tmpl @@ -185,9 +185,7 @@ spec: args: - --config=/config - -logtostderr=true - {{ if .APIServer.EnableSamplePipeline }} - --sampleconfig=/config/sample_config.json - {{ end }} {{ if .PodToPodTLS }} - --tlsCertPath=/etc/tls/private/tls.crt - --tlsCertKeyPath=/etc/tls/private/tls.key @@ -246,19 +244,15 @@ spec: - mountPath: /etc/tls/private name: proxy-tls {{ end }} - {{ if or .APIServer.EnableSamplePipeline .CustomCABundle }} - {{ if .APIServer.EnableSamplePipeline }} - name: sample-config mountPath: /config/sample_config.json subPath: sample_config.json - name: sample-pipeline mountPath: /samples/ - {{ end }} {{ if .CustomCABundle }} - mountPath: {{ .CustomCABundleRootMountPath }} name: ca-bundle {{ end }} - {{ end }} {{ if .APIServer.EnableRoute }} - name: oauth-proxy args: @@ -330,11 +324,10 @@ spec: configMap: name: {{ .CustomCABundle.ConfigMapName }} {{ end }} - {{ if .APIServer.EnableSamplePipeline }} - name: sample-config configMap: name: sample-config-{{.Name}} - name: sample-pipeline configMap: name: sample-pipeline-{{.Name}} - {{ end }} + diff --git a/controllers/apiserver.go b/controllers/apiserver.go index 61a6c6965..d7406ce4e 100644 --- a/controllers/apiserver.go +++ b/controllers/apiserver.go @@ -25,7 +25,6 @@ import ( dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" "github.com/opendatahub-io/data-science-pipelines-operator/controllers/config" v1 "github.com/openshift/api/route/v1" - corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/types" ) @@ -174,20 +173,10 @@ func (r *DSPAReconciler) ReconcileAPIServer(ctx context.Context, dsp *dspav1.Dat } } - for cmName, template := range samplePipelineTemplates { - //if dsp.Spec.APIServer.EnableSamplePipeline || 
dsp.Spec.APIServer.ManagedPipelines.EnableIrisPipeline || dsp.Spec.APIServer.ManagedPipelines.EnableInstructLabPipeline { - if dsp.Spec.APIServer.EnableSamplePipeline { - err := r.Apply(dsp, params, template) - if err != nil { - return err - } - } else { - cm := &corev1.ConfigMap{} - namespacedNamed := types.NamespacedName{Name: cmName + "-" + dsp.Name, Namespace: dsp.Namespace} - err := r.DeleteResourceIfItExists(ctx, cm, namespacedNamed) - if err != nil { - return err - } + for _, template := range samplePipelineTemplates { + err := r.Apply(dsp, params, template) + if err != nil { + return err } } From 0331a6b57eb1220633506089b21c9246fb3f8fa2 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Tue, 11 Feb 2025 18:13:40 -0500 Subject: [PATCH 08/14] Apply ConfigHash label to APIServer Pod - Restarts APIServer on SampleConfig JSON changes Signed-off-by: Giulio Frasca --- api/v1/dspipeline_types.go | 21 ++++-- api/v1alpha1/dspipeline_types.go | 32 +++++++--- api/v1alpha1/zz_generated.deepcopy.go | 32 ++++++++-- config/configmaps/files/config.yaml | 9 +-- ...b.io_datasciencepipelinesapplications.yaml | 64 ++++++++++++------- .../apiserver/default/deployment.yaml.tmpl | 3 +- .../overlays/make-deploy/kustomization.yaml | 4 +- controllers/apiserver.go | 55 +++++++--------- controllers/config/defaults.go | 2 +- controllers/dspipeline_controller.go | 7 -- controllers/dspipeline_params.go | 1 + 11 files changed, 136 insertions(+), 94 deletions(-) diff --git a/api/v1/dspipeline_types.go b/api/v1/dspipeline_types.go index 7794092a4..82b04f2e9 100644 --- a/api/v1/dspipeline_types.go +++ b/api/v1/dspipeline_types.go @@ -55,15 +55,24 @@ type DSPASpec struct { *WorkflowController `json:"workflowController,omitempty"` } +// +kubebuilder:validation:Pattern=`^(Managed|Removed)$` +type ManagedPipelineState string + type ManagedPipelineOptions struct { - // Include managed pipelines with the deployment of this DSP API Server. 
Default: nil - // Applicable values:"Managed" or "Removed" + // Set to one of the following values: + // + // - "Managed" : This pipeline is automatically imported. + // - "Removed" : This pipeline is not automatically imported when a new pipeline server or DSPA is created. If previously set to "Managed", setting to "Removed" does not remove existing preloaded pipelines but does prevent future updates from being imported. + // + // +kubebuilder:validation:Enum=Managed;Removed + // +kubebuilder:default=Removed // +kubebuilder:validation:Optional - State string `json:"state,omitempty"` + State ManagedPipelineState `json:"state,omitempty"` } type ManagedPipelinesSpec struct { - // Include instructlab multi-phase training pipelines with the deployment of this DSP API Server. + // Configures whether to automatically import the InstructLab pipeline. + // You must enable the trainingoperator component to run the InstructLab pipeline. // +kubebuilder:validation:Optional InstructLab *ManagedPipelineOptions `json:"instructLab,omitempty"` } @@ -79,7 +88,7 @@ type APIServer struct { // +kubebuilder:default:=true // +kubebuilder:validation:Optional EnableRoute bool `json:"enableOauth"` - // Include Iris sample pipeline with the deployment of this DSP API Server. Default: true + // Include the Iris sample pipeline with the deployment of this DSP API Server. Default: true // +kubebuilder:default:=false // +Deprecated // +kubebuilder:validation:Optional @@ -96,7 +105,7 @@ type APIServer struct { ToolboxImage string `json:"toolboxImage,omitempty"` // RhelAI image used for ilab tasks in managed pipelines. RHELAIImage string `json:"rhelAIImage,omitempty"` - // Enable various pipelines with the deployment of this DSP API server. + // Enable various managed pipelines on this DSP API server. ManagedPipelines *ManagedPipelinesSpec `json:"managedPipelines,omitempty"` // Specify custom Pod resource requirements for this component. 
Resources *ResourceRequirements `json:"resources,omitempty"` diff --git a/api/v1alpha1/dspipeline_types.go b/api/v1alpha1/dspipeline_types.go index 41be2c45d..3f7fb05f7 100644 --- a/api/v1alpha1/dspipeline_types.go +++ b/api/v1alpha1/dspipeline_types.go @@ -55,15 +55,26 @@ type DSPASpec struct { *WorkflowController `json:"workflowController,omitempty"` } -type ManagedPipelines struct { - // Include instructlab multi-phase training pipelines with the deployment of this DSP API Server. Default: true - // Applicable values:"Managed" or "Removed" +// +kubebuilder:validation:Pattern=`^(Managed|Removed)$` +type ManagedPipelineState string + +type ManagedPipelineOptions struct { + // Set to one of the following values: + // + // - "Managed" : This pipeline is automatically imported. + // - "Removed" : This pipeline is not automatically imported when a new pipeline server or DSPA is created. If previously set to "Managed", setting to "Removed" does not remove existing preloaded pipelines but does prevent future updates from being imported. + // + // +kubebuilder:validation:Enum=Managed;Removed + // +kubebuilder:default=Removed // +kubebuilder:validation:Optional - EnableInstructLabPipeline string `json:"enableInstructLabPipeline,omitempty"` - // Include sample pipelines with the deployment of this DSP API Server. Default: true - // Applicable values:"Managed" or "Removed" + State ManagedPipelineState `json:"state,omitempty"` +} + +type ManagedPipelinesSpec struct { + // Configures whether to automatically import the InstructLab pipeline. + // You must enable the trainingoperator component to run the InstructLab pipeline. 
// +kubebuilder:validation:Optional - EnableIrisPipeline string `json:"enableIrisPipeline,omitempty"` + InstructLab *ManagedPipelineOptions `json:"instructLab,omitempty"` } type APIServer struct { @@ -77,8 +88,9 @@ type APIServer struct { // +kubebuilder:default:=true // +kubebuilder:validation:Optional EnableRoute bool `json:"enableOauth"` - // Include sample pipelines with the deployment of this DSP API Server. Default: true + // Include the Iris sample pipeline with the deployment of this DSP API Server. Default: true // +kubebuilder:default:=false + // +Deprecated // +kubebuilder:validation:Optional EnableSamplePipeline bool `json:"enableSamplePipeline"` // Launcher/Executor image used during pipeline execution. @@ -93,8 +105,8 @@ type APIServer struct { ToolboxImage string `json:"toolboxImage,omitempty"` // RhelAI image used for ilab tasks in managed pipelines. RHELAIImage string `json:"rhelAIImage,omitempty"` - // Enable various pipelines with the deployment of this DSP API server. - ManagedPipelines *ManagedPipelines `json:"managedPipelines,omitempty"` + // Enable various managed pipelines on this DSP API server. + ManagedPipelines *ManagedPipelinesSpec `json:"managedPipelines,omitempty"` // Specify custom Pod resource requirements for this component. Resources *ResourceRequirements `json:"resources,omitempty"` // Specify init container resource requirements. 
The init container diff --git a/api/v1alpha1/zz_generated.deepcopy.go b/api/v1alpha1/zz_generated.deepcopy.go index d48cc0c98..75bd5c279 100644 --- a/api/v1alpha1/zz_generated.deepcopy.go +++ b/api/v1alpha1/zz_generated.deepcopy.go @@ -31,8 +31,8 @@ func (in *APIServer) DeepCopyInto(out *APIServer) { *out = *in if in.ManagedPipelines != nil { in, out := &in.ManagedPipelines, &out.ManagedPipelines - *out = new(ManagedPipelines) - **out = **in + *out = new(ManagedPipelinesSpec) + (*in).DeepCopyInto(*out) } if in.Resources != nil { in, out := &in.Resources, &out.Resources @@ -411,16 +411,36 @@ func (in *MLMD) DeepCopy() *MLMD { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ManagedPipelines) DeepCopyInto(out *ManagedPipelines) { +func (in *ManagedPipelineOptions) DeepCopyInto(out *ManagedPipelineOptions) { *out = *in } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedPipelines. -func (in *ManagedPipelines) DeepCopy() *ManagedPipelines { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedPipelineOptions. +func (in *ManagedPipelineOptions) DeepCopy() *ManagedPipelineOptions { + if in == nil { + return nil + } + out := new(ManagedPipelineOptions) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ManagedPipelinesSpec) DeepCopyInto(out *ManagedPipelinesSpec) { + *out = *in + if in.InstructLab != nil { + in, out := &in.InstructLab, &out.InstructLab + *out = new(ManagedPipelineOptions) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedPipelinesSpec. 
+func (in *ManagedPipelinesSpec) DeepCopy() *ManagedPipelinesSpec { if in == nil { return nil } - out := new(ManagedPipelines) + out := new(ManagedPipelinesSpec) in.DeepCopyInto(out) return out } diff --git a/config/configmaps/files/config.yaml b/config/configmaps/files/config.yaml index 0fb6a2914..11436a380 100644 --- a/config/configmaps/files/config.yaml +++ b/config/configmaps/files/config.yaml @@ -13,21 +13,18 @@ Images: RuntimeGeneric: $(IMAGES_PIPELINESRUNTIMEGENERIC) Toolbox: $(IMAGES_TOOLBOX) RHELAI: $(IMAGES_RHELAI) -Samples: - Iris: /pipelines/iris-compiled.yaml - Instructlab: /pipelines/instructlab.yaml ManagedPipelinesMetadata: Instructlab: Name: "[InstructLab] LLM Training Pipeline" Description: Filepath: /pipelines/instructlab.yaml - VersionName: "[InstructLab] LLM Training Pipeline - $(PLATFORMVERSION)" + VersionName: "[InstructLab] LLM Training Pipeline" # VersionDescription: "TODO - Add a Version Description" - Iris: + Iris: Name: "[Demo] iris-training" Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" Filepath: /samples/iris-pipeline-compiled.yaml - VersionName: "[Demo] iris-training - $(PLATFORMVERSION)" + VersionName: "[Demo] iris-training" # VersionDescription: "TODO - Add a Version Description" DSPO: HealthCheck: diff --git a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml index aede90de8..63283364f 100644 --- a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml +++ b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml @@ -114,7 +114,7 @@ spec: type: boolean enableSamplePipeline: default: false - description: 'Include Iris sample pipeline with the 
deployment + description: 'Include the Iris sample pipeline with the deployment of this DSP API Server. Default: true' type: boolean image: @@ -157,17 +157,27 @@ spec: type: object type: object managedPipelines: - description: Enable various pipelines with the deployment of this - DSP API server. + description: Enable various managed pipelines on this DSP API + server. properties: instructLab: - description: Include instructlab multi-phase training pipelines - with the deployment of this DSP API Server. + description: Configures whether to automatically import the + InstructLab pipeline. You must enable the trainingoperator + component to run the InstructLab pipeline. properties: state: - description: 'Include managed pipelines with the deployment - of this DSP API Server. Default: nil Applicable values:"Managed" - or "Removed"' + default: Removed + description: "Set to one of the following values: \n - + \"Managed\" : This pipeline is automatically imported. + - \"Removed\" : This pipeline is not automatically imported + when a new pipeline server or DSPA is created. If previously + set to \"Managed\", setting to \"Removed\" does not + remove existing preloaded pipelines but does prevent + future updates from being imported." + enum: + - Managed + - Removed + pattern: ^(Managed|Removed)$ type: string type: object type: object @@ -1063,8 +1073,8 @@ spec: type: boolean enableSamplePipeline: default: false - description: 'Include sample pipelines with the deployment of - this DSP API Server. Default: true' + description: 'Include the Iris sample pipeline with the deployment + of this DSP API Server. Default: true' type: boolean image: description: Specify a custom image for DSP API Server. @@ -1111,19 +1121,29 @@ spec: DSP V1 only, will be removed in the future.' type: boolean managedPipelines: - description: Enable various pipelines with the deployment of this - DSP API server. + description: Enable various managed pipelines on this DSP API + server. 
properties: - enableInstructLabPipeline: - description: 'Include instructlab multi-phase training pipelines - with the deployment of this DSP API Server. Default: true - Applicable values:"Managed" or "Removed"' - type: string - enableIrisPipeline: - description: 'Include sample pipelines with the deployment - of this DSP API Server. Default: true Applicable values:"Managed" - or "Removed"' - type: string + instructLab: + description: Configures whether to automatically import the + InstructLab pipeline. You must enable the trainingoperator + component to run the InstructLab pipeline. + properties: + state: + default: Removed + description: "Set to one of the following values: \n - + \"Managed\" : This pipeline is automatically imported. + - \"Removed\" : This pipeline is not automatically imported + when a new pipeline server or DSPA is created. If previously + set to \"Managed\", setting to \"Removed\" does not + remove existing preloaded pipelines but does prevent + future updates from being imported." 
+ enum: + - Managed + - Removed + pattern: ^(Managed|Removed)$ + type: string + type: object type: object moveResultsImage: description: 'Image used for internal artifact passing handling diff --git a/config/internal/apiserver/default/deployment.yaml.tmpl b/config/internal/apiserver/default/deployment.yaml.tmpl index 230b43ce6..f49e0c32a 100644 --- a/config/internal/apiserver/default/deployment.yaml.tmpl +++ b/config/internal/apiserver/default/deployment.yaml.tmpl @@ -15,6 +15,8 @@ spec: dspa: {{.Name}} template: metadata: + annotations: + configHash: {{.APIServerConfigHash}} labels: app: {{.APIServerDefaultResourceName}} component: data-science-pipelines @@ -330,4 +332,3 @@ spec: - name: sample-pipeline configMap: name: sample-pipeline-{{.Name}} - diff --git a/config/overlays/make-deploy/kustomization.yaml b/config/overlays/make-deploy/kustomization.yaml index f59b8ee7a..900061e63 100644 --- a/config/overlays/make-deploy/kustomization.yaml +++ b/config/overlays/make-deploy/kustomization.yaml @@ -8,5 +8,5 @@ patchesStrategicMerge: - img_patch.yaml images: - name: controller - newName: quay.io/opendatahub/data-science-pipelines-operator - newTag: main + newName: quay.io/gmfrasca/dspo + newTag: managed-pipelines-v0.0.3 diff --git a/controllers/apiserver.go b/controllers/apiserver.go index d7406ce4e..23133ee56 100644 --- a/controllers/apiserver.go +++ b/controllers/apiserver.go @@ -17,6 +17,8 @@ package controllers import ( "context" + "crypto/sha256" + "encoding/hex" "encoding/json" "fmt" "strings" @@ -48,14 +50,15 @@ func (r *DSPAReconciler) GenerateSamplePipelineMetadataBlock(pipeline string) (m item := make(map[string]string) // Get Required Fields - pName, err := config.GetStringConfigOrDie(fmt.Sprintf("ManagedPipelinesMetadata.%s.Name", pipeline)) + pName, err := config.GetStringConfigOrError(fmt.Sprintf("ManagedPipelinesMetadata.%s.Name", pipeline)) if err != nil { return nil, err } - pFile, err := 
config.GetStringConfigOrDie(fmt.Sprintf("ManagedPipelinesMetadata.%s.Filepath", pipeline)) + pFile, err := config.GetStringConfigOrError(fmt.Sprintf("ManagedPipelinesMetadata.%s.Filepath", pipeline)) if err != nil { return nil, err } + platformVersion := config.GetStringConfigWithDefault("PlatformVersion", config.DefaultPlatformVersion) // Get optional fields pDesc := config.GetStringConfigWithDefault(fmt.Sprintf("ManagedPipelinesMetadata.%s.Description", pipeline), "") @@ -66,7 +69,7 @@ func (r *DSPAReconciler) GenerateSamplePipelineMetadataBlock(pipeline string) (m item["name"] = pName item["file"] = pFile item["description"] = pDesc - item["versionName"] = pVerName + item["versionName"] = fmt.Sprintf("%s - %s", pVerName, strings.Trim(platformVersion, "\"")) item["versionDescription"] = pVerDesc return item, nil @@ -74,45 +77,18 @@ func (r *DSPAReconciler) GenerateSamplePipelineMetadataBlock(pipeline string) (m } func (r *DSPAReconciler) GetSampleConfig(ctx context.Context, dsp *dspa.DataSciencePipelinesApplication, params *DSPAParams) (string, error) { - // TODO(gfrasca): do this more systematically and/or extendably - // enableInstructLabPipeline, err := r.IsPipelineEnabledByPlatform("instructlab") - // if err != nil { - // return "", err - // } - // enableIrisPipeline, err := r.IsPipelineEnabledByPlatform("iris") - // if err != nil { - // return "", err - // } - // Check if InstructLab Pipeline enabled in this DSPA enableInstructLabPipeline := false if dsp.Spec.APIServer.ManagedPipelines != nil && dsp.Spec.APIServer.ManagedPipelines.InstructLab != nil { settingInDSPA := dsp.Spec.APIServer.ManagedPipelines.InstructLab.State if settingInDSPA != "" { - enableInstructLabPipeline = strings.EqualFold(settingInDSPA, "Managed") + enableInstructLabPipeline = strings.EqualFold(string(settingInDSPA), "Managed") } } return r.GenerateSampleConfigJSON(enableInstructLabPipeline, dsp.Spec.APIServer.EnableSamplePipeline) } -func (r *DSPAReconciler) 
IsPipelineEnabledByPlatform(pipelineName string) (bool, error) { - var platformManagedPipelines map[string]map[string]string - platformPipelinesJSON := config.GetStringConfigWithDefault("ManagedPipelines", config.DefaultManagedPipelines) - - err := json.Unmarshal([]byte(platformPipelinesJSON), &platformManagedPipelines) - if err != nil { - return false, err - } - - for name, val := range platformManagedPipelines { - if strings.EqualFold(name, pipelineName) { - return strings.EqualFold(val["state"], "Managed"), nil - } - } - return false, nil -} - func (r *DSPAReconciler) GenerateSampleConfigJSON(enableInstructLabPipeline, enableIrisPipeline bool) (string, error) { // Now generate a sample config @@ -132,7 +108,7 @@ func (r *DSPAReconciler) GenerateSampleConfigJSON(enableInstructLabPipeline, ena pipelineConfig = append(pipelineConfig, item) } - var sampleConfig = make(map[string]interface{}) + var sampleConfig = make(map[string]any) sampleConfig["pipelines"] = pipelineConfig sampleConfig["loadSamplesOnRestart"] = true @@ -153,8 +129,21 @@ func (r *DSPAReconciler) ReconcileAPIServer(ctx context.Context, dsp *dspav1.Dat return nil } + log.Info("Generating Sample Config") + sampleConfigJSON, err := r.GetSampleConfig(ctx, dsp, params) + if err != nil { + return err + } + params.SampleConfigJSON = sampleConfigJSON + + // Generate configuration hash for rebooting on sample changes + hasher := sha256.New() + hasher.Write([]byte(sampleConfigJSON)) + configHash := hex.EncodeToString(hasher.Sum(nil)) + params.APIServerConfigHash = configHash + log.Info("Applying APIServer Resources") - err := r.ApplyDir(dsp, params, apiServerTemplatesDir) + err = r.ApplyDir(dsp, params, apiServerTemplatesDir) if err != nil { return err } diff --git a/controllers/config/defaults.go b/controllers/config/defaults.go index 06788ff4c..15e6f264e 100644 --- a/controllers/config/defaults.go +++ b/controllers/config/defaults.go @@ -195,7 +195,7 @@ func createResourceRequirement(RequestsCPU 
resource.Quantity, RequestsMemory res } } -func GetStringConfigOrDie(configName string) (string, error) { +func GetStringConfigOrError(configName string) (string, error) { if !viper.IsSet(configName) { return "", fmt.Errorf("value not set in config for configname %s", configName) } diff --git a/controllers/dspipeline_controller.go b/controllers/dspipeline_controller.go index f0aba231e..9244c5822 100644 --- a/controllers/dspipeline_controller.go +++ b/controllers/dspipeline_controller.go @@ -252,13 +252,6 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. return ctrl.Result{Requeue: true, RequeueAfter: requeueTime}, nil } - sampleConfigJSON, err := r.GetSampleConfig(ctx, dspa, params) - if err != nil { - log.Info(fmt.Sprintf("Encountered error while generating sample config: [%s]", err)) - return ctrl.Result{Requeue: true, RequeueAfter: requeueTime}, nil - } - params.SampleConfigJSON = sampleConfigJSON - err = r.ReconcileDatabase(ctx, dspa, params) if err != nil { dspaStatus.SetDatabaseNotReady(err, config.FailingToDeploy) diff --git a/controllers/dspipeline_params.go b/controllers/dspipeline_params.go index 1e08e4c53..936420648 100644 --- a/controllers/dspipeline_params.go +++ b/controllers/dspipeline_params.go @@ -57,6 +57,7 @@ type DSPAParams struct { APIServer *dspa.APIServer APIServerDefaultResourceName string APIServerServiceName string + APIServerConfigHash string OAuthProxy string SampleConfigJSON string ScheduledWorkflow *dspa.ScheduledWorkflow From 28b1a5b4872dd2418977ab5a8a53738cb5662472 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Thu, 13 Feb 2025 14:58:54 -0500 Subject: [PATCH 09/14] Fix Functional Tests to accomodate dyanmic sample config JSON generation Signed-off-by: Giulio Frasca --- api/v1/dspipeline_types.go | 3 +-- api/v1alpha1/dspipeline_types.go | 4 +--- config/configmaps/files/config.yaml | 8 +++---- ...b.io_datasciencepipelinesapplications.yaml | 18 +++++++-------- 
.../overlays/make-deploy/kustomization.yaml | 2 +- controllers/apiserver.go | 22 ++++++++----------- controllers/config/defaults.go | 2 +- .../testdata/declarative/case_0/config.yaml | 12 ++++++++++ .../created/apiserver_deployment.yaml | 2 ++ .../testdata/declarative/case_1/config.yaml | 13 +++++++++++ .../testdata/declarative/case_2/config.yaml | 13 +++++++++++ .../declarative/case_2/deploy/cr.yaml | 3 +++ .../created/apiserver_deployment.yaml | 2 ++ .../expected/created/sample-config.yaml.tmpl | 8 +------ .../testdata/declarative/case_3/config.yaml | 12 ++++++++++ .../created/apiserver_deployment.yaml | 16 ++++++++++++++ .../testdata/declarative/case_4/config.yaml | 12 ++++++++++ .../created/apiserver_deployment.yaml | 16 ++++++++++++++ .../testdata/declarative/case_5/config.yaml | 12 ++++++++++ .../created/apiserver_deployment.yaml | 16 ++++++++++++++ .../testdata/declarative/case_6/config.yaml | 12 ++++++++++ .../created/apiserver_deployment.yaml | 16 ++++++++++++++ 22 files changed, 182 insertions(+), 42 deletions(-) diff --git a/api/v1/dspipeline_types.go b/api/v1/dspipeline_types.go index 82b04f2e9..f617a2f28 100644 --- a/api/v1/dspipeline_types.go +++ b/api/v1/dspipeline_types.go @@ -62,7 +62,7 @@ type ManagedPipelineOptions struct { // Set to one of the following values: // // - "Managed" : This pipeline is automatically imported. - // - "Removed" : This pipeline is not automatically imported when a new pipeline server or DSPA is created. If previously set to "Managed", setting to "Removed" does not remove existing preloaded pipelines but does prevent future updates from being imported. + // - "Removed" : This pipeline is not automatically imported. If previously set to "Managed", setting to "Removed" does not remove existing managed pipelines but does prevent future updates from being imported. 
// // +kubebuilder:validation:Enum=Managed;Removed // +kubebuilder:default=Removed @@ -90,7 +90,6 @@ type APIServer struct { EnableRoute bool `json:"enableOauth"` // Include the Iris sample pipeline with the deployment of this DSP API Server. Default: true // +kubebuilder:default:=false - // +Deprecated // +kubebuilder:validation:Optional EnableSamplePipeline bool `json:"enableSamplePipeline"` // Launcher/Executor image used during pipeline execution. diff --git a/api/v1alpha1/dspipeline_types.go b/api/v1alpha1/dspipeline_types.go index 3f7fb05f7..7e55efedf 100644 --- a/api/v1alpha1/dspipeline_types.go +++ b/api/v1alpha1/dspipeline_types.go @@ -62,8 +62,7 @@ type ManagedPipelineOptions struct { // Set to one of the following values: // // - "Managed" : This pipeline is automatically imported. - // - "Removed" : This pipeline is not automatically imported when a new pipeline server or DSPA is created. If previously set to "Managed", setting to "Removed" does not remove existing preloaded pipelines but does prevent future updates from being imported. - // + // - "Removed" : This pipeline is not automatically imported. If previously set to "Managed", setting to "Removed" does not remove existing managed pipelines but does prevent future updates from being imported. // // +kubebuilder:validation:Enum=Managed;Removed // +kubebuilder:default=Removed // +kubebuilder:validation:Optional @@ -90,7 +89,6 @@ type APIServer struct { EnableRoute bool `json:"enableOauth"` // Include the Iris sample pipeline with the deployment of this DSP API Server. Default: true // +kubebuilder:default:=false - // +Deprecated // +kubebuilder:validation:Optional EnableSamplePipeline bool `json:"enableSamplePipeline"` // Launcher/Executor image used during pipeline execution. 
diff --git a/config/configmaps/files/config.yaml b/config/configmaps/files/config.yaml index 11436a380..698ce79c6 100644 --- a/config/configmaps/files/config.yaml +++ b/config/configmaps/files/config.yaml @@ -15,17 +15,15 @@ Images: RHELAI: $(IMAGES_RHELAI) ManagedPipelinesMetadata: Instructlab: - Name: "[InstructLab] LLM Training Pipeline" - Description: + Name: Instructlab + Description: InstructLab fine-tunes models using synthetic data generation (SDG) techniques and a structured taxonomy to create diverse, high-quality training datasets. Filepath: /pipelines/instructlab.yaml - VersionName: "[InstructLab] LLM Training Pipeline" - # VersionDescription: "TODO - Add a Version Description" + VersionName: Instructlab Iris: Name: "[Demo] iris-training" Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" Filepath: /samples/iris-pipeline-compiled.yaml VersionName: "[Demo] iris-training" - # VersionDescription: "TODO - Add a Version Description" DSPO: HealthCheck: Database: diff --git a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml index 63283364f..27095767d 100644 --- a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml +++ b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml @@ -169,11 +169,10 @@ spec: default: Removed description: "Set to one of the following values: \n - \"Managed\" : This pipeline is automatically imported. - - \"Removed\" : This pipeline is not automatically imported - when a new pipeline server or DSPA is created. 
If previously - set to \"Managed\", setting to \"Removed\" does not - remove existing preloaded pipelines but does prevent - future updates from being imported." + - \"Removed\" : This pipeline is not automatically imported. + If previously set to \"Managed\", setting to \"Removed\" + does not remove existing managed pipelines but does + prevent future updates from being imported." enum: - Managed - Removed @@ -1133,11 +1132,10 @@ spec: default: Removed description: "Set to one of the following values: \n - \"Managed\" : This pipeline is automatically imported. - - \"Removed\" : This pipeline is not automatically imported - when a new pipeline server or DSPA is created. If previously - set to \"Managed\", setting to \"Removed\" does not - remove existing preloaded pipelines but does prevent - future updates from being imported." + - \"Removed\" : This pipeline is not automatically imported. + If previously set to \"Managed\", setting to \"Removed\" + does not remove existing managed pipelines but does + prevent future updates from being imported.\t//" enum: - Managed - Removed diff --git a/config/overlays/make-deploy/kustomization.yaml b/config/overlays/make-deploy/kustomization.yaml index 900061e63..ab0fdf558 100644 --- a/config/overlays/make-deploy/kustomization.yaml +++ b/config/overlays/make-deploy/kustomization.yaml @@ -9,4 +9,4 @@ patchesStrategicMerge: images: - name: controller newName: quay.io/gmfrasca/dspo - newTag: managed-pipelines-v0.0.3 + newTag: managed-pipelines-v0.0.5 diff --git a/controllers/apiserver.go b/controllers/apiserver.go index 23133ee56..7da9b4da3 100644 --- a/controllers/apiserver.go +++ b/controllers/apiserver.go @@ -18,7 +18,6 @@ package controllers import ( "context" "crypto/sha256" - "encoding/hex" "encoding/json" "fmt" "strings" @@ -50,19 +49,19 @@ func (r *DSPAReconciler) GenerateSamplePipelineMetadataBlock(pipeline string) (m item := make(map[string]string) // Get Required Fields - pName, err := 
config.GetStringConfigOrError(fmt.Sprintf("ManagedPipelinesMetadata.%s.Name", pipeline)) + pName, err := config.GetStringConfig(fmt.Sprintf("ManagedPipelinesMetadata.%s.Name", pipeline)) if err != nil { return nil, err } - pFile, err := config.GetStringConfigOrError(fmt.Sprintf("ManagedPipelinesMetadata.%s.Filepath", pipeline)) + pFile, err := config.GetStringConfig(fmt.Sprintf("ManagedPipelinesMetadata.%s.Filepath", pipeline)) if err != nil { return nil, err } - platformVersion := config.GetStringConfigWithDefault("PlatformVersion", config.DefaultPlatformVersion) + platformVersion := config.GetStringConfigWithDefault("DSPO.PlatformVersion", config.DefaultPlatformVersion) // Get optional fields pDesc := config.GetStringConfigWithDefault(fmt.Sprintf("ManagedPipelinesMetadata.%s.Description", pipeline), "") - pVerName := config.GetStringConfigWithDefault(fmt.Sprintf("ManagedPipelinesMetadata.%s.VersionName", pipeline), "") + pVerName := config.GetStringConfigWithDefault(fmt.Sprintf("ManagedPipelinesMetadata.%s.VersionName", pipeline), pName) pVerDesc := config.GetStringConfigWithDefault(fmt.Sprintf("ManagedPipelinesMetadata.%s.VersionDescription", pipeline), "") // Create Sample Config item @@ -76,7 +75,7 @@ func (r *DSPAReconciler) GenerateSamplePipelineMetadataBlock(pipeline string) (m } -func (r *DSPAReconciler) GetSampleConfig(ctx context.Context, dsp *dspa.DataSciencePipelinesApplication, params *DSPAParams) (string, error) { +func (r *DSPAReconciler) GetSampleConfig(dsp *dspa.DataSciencePipelinesApplication) (string, error) { // Check if InstructLab Pipeline enabled in this DSPA enableInstructLabPipeline := false if dsp.Spec.APIServer.ManagedPipelines != nil && dsp.Spec.APIServer.ManagedPipelines.InstructLab != nil { @@ -86,10 +85,10 @@ func (r *DSPAReconciler) GetSampleConfig(ctx context.Context, dsp *dspa.DataScie } } - return r.GenerateSampleConfigJSON(enableInstructLabPipeline, dsp.Spec.APIServer.EnableSamplePipeline) + return 
r.generateSampleConfigJSON(enableInstructLabPipeline, dsp.Spec.APIServer.EnableSamplePipeline) } -func (r *DSPAReconciler) GenerateSampleConfigJSON(enableInstructLabPipeline, enableIrisPipeline bool) (string, error) { +func (r *DSPAReconciler) generateSampleConfigJSON(enableInstructLabPipeline, enableIrisPipeline bool) (string, error) { // Now generate a sample config var pipelineConfig = make([]map[string]string, 0) @@ -130,17 +129,14 @@ func (r *DSPAReconciler) ReconcileAPIServer(ctx context.Context, dsp *dspav1.Dat } log.Info("Generating Sample Config") - sampleConfigJSON, err := r.GetSampleConfig(ctx, dsp, params) + sampleConfigJSON, err := r.GetSampleConfig(dsp) if err != nil { return err } params.SampleConfigJSON = sampleConfigJSON // Generate configuration hash for rebooting on sample changes - hasher := sha256.New() - hasher.Write([]byte(sampleConfigJSON)) - configHash := hex.EncodeToString(hasher.Sum(nil)) - params.APIServerConfigHash = configHash + params.APIServerConfigHash = fmt.Sprintf("%x", sha256.Sum256([]byte(sampleConfigJSON))) log.Info("Applying APIServer Resources") err = r.ApplyDir(dsp, params, apiServerTemplatesDir) diff --git a/controllers/config/defaults.go b/controllers/config/defaults.go index 15e6f264e..561b6dbf2 100644 --- a/controllers/config/defaults.go +++ b/controllers/config/defaults.go @@ -195,7 +195,7 @@ func createResourceRequirement(RequestsCPU resource.Quantity, RequestsMemory res } } -func GetStringConfigOrError(configName string) (string, error) { +func GetStringConfig(configName string) (string, error) { if !viper.IsSet(configName) { return "", fmt.Errorf("value not set in config for configname %s", configName) } diff --git a/controllers/testdata/declarative/case_0/config.yaml b/controllers/testdata/declarative/case_0/config.yaml index 06225a2d7..9cd07ed6b 100644 --- a/controllers/testdata/declarative/case_0/config.yaml +++ b/controllers/testdata/declarative/case_0/config.yaml @@ -16,6 +16,18 @@ Images: RuntimeGeneric: 
runtimegeneric:test0 Toolbox: toolbox:test0 RHELAI: rhelai:test0 +ManagedPipelinesMetadata: + Instructlab: + Name: "[InstructLab] LLM Training Pipeline" + Description: + Filepath: /pipelines/instructlab.yaml + VersionName: "[InstructLab] LLM Training Pipeline" + Iris: + Name: "[Demo] iris-training" + Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" + Filepath: /samples/iris-pipeline-compiled.yaml + VersionName: "[Demo] iris-training" DSPO: + PlatformVersion: v0.0.0 ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml index 7d00fcfbb..0046bb7d5 100644 --- a/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml @@ -16,6 +16,8 @@ spec: dspa: testdsp0 template: metadata: + annotations: + configHash: 33ff9677391ad5d02376e8a13b5dc6a207c37120c38011f7a3bba17d63b4cfbb labels: dsp-version: v2 app: ds-pipeline-testdsp0 diff --git a/controllers/testdata/declarative/case_1/config.yaml b/controllers/testdata/declarative/case_1/config.yaml index c1f31d850..f54469202 100644 --- a/controllers/testdata/declarative/case_1/config.yaml +++ b/controllers/testdata/declarative/case_1/config.yaml @@ -15,3 +15,16 @@ Images: RuntimeGeneric: runtimegeneric:test1 Toolbox: toolbox:test1 RHELAI: rhelai:test1 +ManagedPipelinesMetadata: + Instructlab: + Name: "[InstructLab] LLM Training Pipeline" + Description: + Filepath: /pipelines/instructlab.yaml + VersionName: "[InstructLab] LLM Training Pipeline" + Iris: + Name: "[Demo] iris-training" + Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to 
demonstrate a basic ML Training workflow" + Filepath: /samples/iris-pipeline-compiled.yaml + VersionName: "[Demo] iris-training" +DSPO: + PlatformVersion: v0.0.0 diff --git a/controllers/testdata/declarative/case_2/config.yaml b/controllers/testdata/declarative/case_2/config.yaml index f9271d908..98cf36973 100644 --- a/controllers/testdata/declarative/case_2/config.yaml +++ b/controllers/testdata/declarative/case_2/config.yaml @@ -15,6 +15,19 @@ Images: RuntimeGeneric: runtimegeneric:test2 Toolbox: toolbox:test2 RHELAI: rhelai:test2 +ManagedPipelinesMetadata: + InstructLab: + Name: InstructLabName + Description: InstructLabDescription + Filepath: InstructLabFilepath + VersionName: InstructLabVersionName + VersionDescription: InstructLabVersionDescription + Iris: + Name: "[Demo] iris-training" + Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" + Filepath: /samples/iris-pipeline-compiled.yaml + VersionName: "[Demo] iris-training" DSPO: + PlatformVersion: v1.2.3 ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_2/deploy/cr.yaml b/controllers/testdata/declarative/case_2/deploy/cr.yaml index d923e3f06..618d73bf1 100644 --- a/controllers/testdata/declarative/case_2/deploy/cr.yaml +++ b/controllers/testdata/declarative/case_2/deploy/cr.yaml @@ -13,6 +13,9 @@ spec: argoDriverImage: argodriverimage:test2 enableOauth: true enableSamplePipeline: true + managedPipelines: + instructLab: + state: Managed customServerConfigMap: name: testserverconfigmapdspa2 key: testserverconfigmapkeydspa2 diff --git a/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml index 9833180e2..272074438 100644 --- a/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml +++ 
b/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml @@ -16,6 +16,8 @@ spec: dspa: testdsp2 template: metadata: + annotations: + configHash: 0a9567df0bc820e0007b6c626b514f8b6ec5cef0c7badd566e6d063e049a5394 labels: dsp-version: v2 app: ds-pipeline-testdsp2 diff --git a/controllers/testdata/declarative/case_2/expected/created/sample-config.yaml.tmpl b/controllers/testdata/declarative/case_2/expected/created/sample-config.yaml.tmpl index aca6d21e4..4756de013 100644 --- a/controllers/testdata/declarative/case_2/expected/created/sample-config.yaml.tmpl +++ b/controllers/testdata/declarative/case_2/expected/created/sample-config.yaml.tmpl @@ -9,10 +9,4 @@ metadata: component: data-science-pipelines data: sample_config.json: |- - [ - { - "name": "[Demo] iris-training", - "description": "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow", - "file": "/samples/iris-pipeline-compiled.yaml" - } - ] + {"loadSamplesOnRestart":true,"pipelines":[{"description":"InstructLabDescription","file":"InstructLabFilepath","name":"InstructLabName","versionDescription":"InstructLabVersionDescription","versionName":"InstructLabVersionName - v1.2.3"},{"description":"[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow","file":"/samples/iris-pipeline-compiled.yaml","name":"[Demo] iris-training","versionDescription":"","versionName":"[Demo] iris-training - v1.2.3"}]} diff --git a/controllers/testdata/declarative/case_3/config.yaml b/controllers/testdata/declarative/case_3/config.yaml index 747ee1b4a..f991b5cd0 100644 --- a/controllers/testdata/declarative/case_3/config.yaml +++ b/controllers/testdata/declarative/case_3/config.yaml @@ -15,6 +15,18 @@ Images: RuntimeGeneric: runtimegeneric:test3 Toolbox: toolbox:test3 RHELAI: rhelai:test3 
+ManagedPipelinesMetadata: + Instructlab: + Name: "[InstructLab] LLM Training Pipeline" + Description: + Filepath: /pipelines/instructlab.yaml + VersionName: "[InstructLab] LLM Training Pipeline" + Iris: + Name: "[Demo] iris-training" + Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" + Filepath: /samples/iris-pipeline-compiled.yaml + VersionName: "[Demo] iris-training" DSPO: + PlatformVersion: v0.0.0 ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml index 117944959..7b925c443 100644 --- a/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml @@ -16,6 +16,8 @@ spec: dspa: testdsp3 template: metadata: + annotations: + configHash: 9a8b56f5098a0d91d9db76d9c8b48e9872c0dbe71bcdc7f08f2c05bfe26c787f labels: dsp-version: v2 app: ds-pipeline-testdsp3 @@ -133,6 +135,7 @@ spec: args: - --config=/config - -logtostderr=true + - --sampleconfig=/config/sample_config.json ports: - containerPort: 8888 name: http @@ -170,6 +173,11 @@ spec: subPath: config.json - mountPath: /config/managed-pipelines name: managed-pipelines + - mountPath: /config/sample_config.json + name: sample-config + subPath: sample_config.json + - mountPath: /samples/ + name: sample-pipeline resources: requests: cpu: 250m @@ -236,4 +244,12 @@ spec: - name: managed-pipelines emptyDir: sizeLimit: 10Mi + - configMap: + defaultMode: 420 + name: sample-config-testdsp3 + name: sample-config + - configMap: + defaultMode: 420 + name: sample-pipeline-testdsp3 + name: sample-pipeline serviceAccountName: ds-pipeline-testdsp3 diff --git a/controllers/testdata/declarative/case_4/config.yaml 
b/controllers/testdata/declarative/case_4/config.yaml index c8eafce57..85079d05a 100644 --- a/controllers/testdata/declarative/case_4/config.yaml +++ b/controllers/testdata/declarative/case_4/config.yaml @@ -15,6 +15,18 @@ Images: RuntimeGeneric: runtimegeneric:test4 Toolbox: toolbox:test4 RHELAI: rhelai:test4 +ManagedPipelinesMetadata: + Instructlab: + Name: "[InstructLab] LLM Training Pipeline" + Description: + Filepath: /pipelines/instructlab.yaml + VersionName: "[InstructLab] LLM Training Pipeline" + Iris: + Name: "[Demo] iris-training" + Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" + Filepath: /samples/iris-pipeline-compiled.yaml + VersionName: "[Demo] iris-training" DSPO: + PlatformVersion: v0.0.0 ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml index 4cb5df715..8f3b2999f 100644 --- a/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml @@ -16,6 +16,8 @@ spec: dspa: testdsp4 template: metadata: + annotations: + configHash: 9a8b56f5098a0d91d9db76d9c8b48e9872c0dbe71bcdc7f08f2c05bfe26c787f labels: dsp-version: v2 app: ds-pipeline-testdsp4 @@ -133,6 +135,7 @@ spec: args: - --config=/config - -logtostderr=true + - --sampleconfig=/config/sample_config.json ports: - containerPort: 8888 name: http @@ -177,6 +180,11 @@ spec: subPath: config.json - mountPath: /config/managed-pipelines name: managed-pipelines + - mountPath: /config/sample_config.json + name: sample-config + subPath: sample_config.json + - mountPath: /samples/ + name: sample-pipeline - name: oauth-proxy args: - --https-address=:8443 @@ -236,4 +244,12 @@ spec: - name: managed-pipelines 
emptyDir: sizeLimit: 10Mi + - configMap: + defaultMode: 420 + name: sample-config-testdsp4 + name: sample-config + - configMap: + defaultMode: 420 + name: sample-pipeline-testdsp4 + name: sample-pipeline serviceAccountName: ds-pipeline-testdsp4 diff --git a/controllers/testdata/declarative/case_5/config.yaml b/controllers/testdata/declarative/case_5/config.yaml index 1216f5c6f..8284e503b 100644 --- a/controllers/testdata/declarative/case_5/config.yaml +++ b/controllers/testdata/declarative/case_5/config.yaml @@ -15,6 +15,18 @@ Images: RuntimeGeneric: runtimegeneric:test5 Toolbox: toolbox:test5 RHELAI: rhelai:test5 +ManagedPipelinesMetadata: + Instructlab: + Name: "[InstructLab] LLM Training Pipeline" + Description: + Filepath: /pipelines/instructlab.yaml + VersionName: "[InstructLab] LLM Training Pipeline" + Iris: + Name: "[Demo] iris-training" + Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" + Filepath: /samples/iris-pipeline-compiled.yaml + VersionName: "[Demo] iris-training" DSPO: + PlatformVersion: v0.0.0 ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml index 4f4f3c0b2..29a474f91 100644 --- a/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml @@ -16,6 +16,8 @@ spec: dspa: testdsp5 template: metadata: + annotations: + configHash: 9a8b56f5098a0d91d9db76d9c8b48e9872c0dbe71bcdc7f08f2c05bfe26c787f labels: dsp-version: v2 app: ds-pipeline-testdsp5 @@ -145,6 +147,7 @@ spec: args: - --config=/config - -logtostderr=true + - --sampleconfig=/config/sample_config.json - --tlsCertPath=/etc/tls/private/tls.crt - --tlsCertKeyPath=/etc/tls/private/tls.key 
ports: @@ -179,6 +182,11 @@ spec: name: managed-pipelines - mountPath: /etc/tls/private name: proxy-tls + - mountPath: /config/sample_config.json + name: sample-config + subPath: sample_config.json + - mountPath: /samples/ + name: sample-pipeline - name: ca-bundle mountPath: /dsp-custom-certs - name: oauth-proxy @@ -245,4 +253,12 @@ spec: configMap: name: dsp-trusted-ca-testdsp5 defaultMode: 420 + - configMap: + defaultMode: 420 + name: sample-config-testdsp5 + name: sample-config + - configMap: + defaultMode: 420 + name: sample-pipeline-testdsp5 + name: sample-pipeline serviceAccountName: ds-pipeline-testdsp5 diff --git a/controllers/testdata/declarative/case_6/config.yaml b/controllers/testdata/declarative/case_6/config.yaml index 6d26a7246..1fb7f4c00 100644 --- a/controllers/testdata/declarative/case_6/config.yaml +++ b/controllers/testdata/declarative/case_6/config.yaml @@ -15,6 +15,18 @@ Images: RuntimeGeneric: runtimegeneric:test6 Toolbox: toolbox:test6 RHELAI: rhelai:test6 +ManagedPipelinesMetadata: + Instructlab: + Name: "[InstructLab] LLM Training Pipeline" + Description: + Filepath: /pipelines/instructlab.yaml + VersionName: "[InstructLab] LLM Training Pipeline" + Iris: + Name: "[Demo] iris-training" + Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" + Filepath: /samples/iris-pipeline-compiled.yaml + VersionName: "[Demo] iris-training" DSPO: + PlatformVersion: v0.0.0 ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml index 8367a9033..c8e47aa99 100644 --- a/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml @@ -16,6 +16,8 @@ spec: dspa: 
testdsp6 template: metadata: + annotations: + configHash: 9a8b56f5098a0d91d9db76d9c8b48e9872c0dbe71bcdc7f08f2c05bfe26c787f labels: dsp-version: v2 app: ds-pipeline-testdsp6 @@ -133,6 +135,7 @@ spec: args: - --config=/config - -logtostderr=true + - --sampleconfig=/config/sample_config.json ports: - containerPort: 8888 name: http @@ -163,6 +166,11 @@ spec: subPath: config.json - mountPath: /config/managed-pipelines name: managed-pipelines + - mountPath: /config/sample_config.json + name: sample-config + subPath: sample_config.json + - mountPath: /samples/ + name: sample-pipeline - name: oauth-proxy args: - --https-address=:8443 @@ -222,4 +230,12 @@ spec: - name: managed-pipelines emptyDir: sizeLimit: 10Mi + - configMap: + defaultMode: 420 + name: sample-config-testdsp6 + name: sample-config + - configMap: + defaultMode: 420 + name: sample-pipeline-testdsp6 + name: sample-pipeline serviceAccountName: ds-pipeline-testdsp6 From b5dd02fbd1db2cce44f27e20a242ac122ad934e8 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 14 Feb 2025 14:51:02 -0500 Subject: [PATCH 10/14] update compatibility doc for 2.11 Signed-off-by: Humair Khan --- docs/release/compatibility.md | 1 + docs/release/compatibility.yaml | 11 +++++++++++ 2 files changed, 12 insertions(+) diff --git a/docs/release/compatibility.md b/docs/release/compatibility.md index 314a19a88..98c6ad8e7 100644 --- a/docs/release/compatibility.md +++ b/docs/release/compatibility.md @@ -6,6 +6,7 @@ Each row outlines the versions for individual subcomponents and images that are | dsp | kfp | argo | ml-metadata | envoy | ocp-pipelines | oauth-proxy | mariadb-103 | ubi-minimal | ubi-micro | openshift | |-----|-----|-----|-----|-----|-----|-----|-----|-----|-----|-----| +| 2.11 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 4.15,4.16,4.17 | | 2.10 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 4.15,4.16,4.17 | | 2.9 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 
4.15,4.16,4.17 | | 2.8 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 4.15,4.16,4.17 | diff --git a/docs/release/compatibility.yaml b/docs/release/compatibility.yaml index fdf9e2195..9aed96449 100644 --- a/docs/release/compatibility.yaml +++ b/docs/release/compatibility.yaml @@ -1,3 +1,14 @@ +- dsp: '2.11' + kfp: '2.2.0' + argo: '3.4.17' + ml-metadata: '1.14.0' + envoy: '1.22.11' + ocp-pipelines: 'N/A' + oauth-proxy: 'v4.14' + mariadb-103: '1' + ubi-minimal: 'N/A' + ubi-micro: 'N/A' + openshift: '4.15,4.16,4.17' - dsp: '2.10' kfp: '2.2.0' argo: '3.4.17' From cafade7a9097cc63115ed121ddd360425e18d59d Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 14 Feb 2025 14:55:08 -0500 Subject: [PATCH 11/14] Revert "update compatibility doc for 2.11" This reverts commit b5dd02fbd1db2cce44f27e20a242ac122ad934e8. --- docs/release/compatibility.md | 1 - docs/release/compatibility.yaml | 11 ----------- 2 files changed, 12 deletions(-) diff --git a/docs/release/compatibility.md b/docs/release/compatibility.md index 98c6ad8e7..314a19a88 100644 --- a/docs/release/compatibility.md +++ b/docs/release/compatibility.md @@ -6,7 +6,6 @@ Each row outlines the versions for individual subcomponents and images that are | dsp | kfp | argo | ml-metadata | envoy | ocp-pipelines | oauth-proxy | mariadb-103 | ubi-minimal | ubi-micro | openshift | |-----|-----|-----|-----|-----|-----|-----|-----|-----|-----|-----| -| 2.11 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 4.15,4.16,4.17 | | 2.10 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 4.15,4.16,4.17 | | 2.9 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 4.15,4.16,4.17 | | 2.8 | 2.2.0 | 3.4.17 | 1.14.0 | 1.22.11 | N/A | v4.14 | 1 | N/A | N/A | 4.15,4.16,4.17 | diff --git a/docs/release/compatibility.yaml b/docs/release/compatibility.yaml index 9aed96449..fdf9e2195 100644 --- a/docs/release/compatibility.yaml +++ b/docs/release/compatibility.yaml @@ -1,14 +1,3 
@@ -- dsp: '2.11' - kfp: '2.2.0' - argo: '3.4.17' - ml-metadata: '1.14.0' - envoy: '1.22.11' - ocp-pipelines: 'N/A' - oauth-proxy: 'v4.14' - mariadb-103: '1' - ubi-minimal: 'N/A' - ubi-micro: 'N/A' - openshift: '4.15,4.16,4.17' - dsp: '2.10' kfp: '2.2.0' argo: '3.4.17' From 84902fe6b98fbb12449ce1391a74313c18b20462 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Fri, 14 Feb 2025 15:04:51 -0500 Subject: [PATCH 12/14] Revert make-deploy overlay to use ODH:main image Signed-off-by: Giulio Frasca --- config/overlays/make-deploy/kustomization.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/overlays/make-deploy/kustomization.yaml b/config/overlays/make-deploy/kustomization.yaml index ab0fdf558..f59b8ee7a 100644 --- a/config/overlays/make-deploy/kustomization.yaml +++ b/config/overlays/make-deploy/kustomization.yaml @@ -8,5 +8,5 @@ patchesStrategicMerge: - img_patch.yaml images: - name: controller - newName: quay.io/gmfrasca/dspo - newTag: managed-pipelines-v0.0.5 + newName: quay.io/opendatahub/data-science-pipelines-operator + newTag: main From 46325dea080363319a1059142ace49eba72b6dc6 Mon Sep 17 00:00:00 2001 From: dsp-developers <140449482+dsp-developers@users.noreply.github.com> Date: Fri, 14 Feb 2025 20:22:08 +0000 Subject: [PATCH 13/14] Generate params for 2.10 --- config/base/params.env | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/config/base/params.env b/config/base/params.env index 619ec513a..68d48283d 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -1,23 +1,21 @@ -IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator:latest -IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:latest -IMAGES_PERSISTENCEAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:latest -IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:latest -IMAGES_ARGO_EXEC=quay.io/opendatahub/ds-pipelines-argo-argoexec:odh-v3.4.17-1 
-IMAGES_ARGO_WORKFLOWCONTROLLER=quay.io/opendatahub/ds-pipelines-argo-workflowcontroller:odh-v3.4.17-1 -IMAGES_LAUNCHER=quay.io/opendatahub/ds-pipelines-launcher:latest -IMAGES_DRIVER=quay.io/opendatahub/ds-pipelines-driver:latest -IMAGES_MLMDGRPC=quay.io/opendatahub/mlmd-grpc-server:latest -IMAGES_PIPELINESRUNTIMEGENERIC=quay.io/opendatahub/ds-pipelines-runtime-generic:latest +IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:8f3681ca2d791dcbeb24fa136d33aa7b7bbc1c181505c2ff9a0f2c252260de08 +IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:0e879726785260c1bb61360d5b3f23fb4bfbc9bbf758e7a529a0e4d13161a1de +IMAGES_PERSISTENCEAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:02f61ffea378a086b8c2a481adb7708fc66ad6c8f3ace1b268ccc6b3a26f785f +IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:17ef27047183ed03e51fdd3e58989a71a2250084ecaa4fe2219f7a1c8347e6cb +IMAGES_LAUNCHER=quay.io/opendatahub/ds-pipelines-launcher@sha256:c957d1aeedde59e33c6571692d85ee8590c404a69bafd0d11b5ea1c5761a3a6a +IMAGES_DRIVER=quay.io/opendatahub/ds-pipelines-driver@sha256:a60eb599f63e05aafef2445bc9695946a306ba48d5f084097db186c30f056a8e +IMAGES_PIPELINESRUNTIMEGENERIC=quay.io/opendatahub/ds-pipelines-runtime-generic@sha256:8fd87a644afe361020167eec539821f634846695f1e25e748c719b97a6ae05cd +IMAGES_ARGO_WORKFLOWCONTROLLER=quay.io/opendatahub/ds-pipelines-argo-workflowcontroller@sha256:995f06328569b558d63cf727c0674df71b1927f74ab60e966596ccb8c06e12f8 +IMAGES_ARGO_EXEC=quay.io/opendatahub/ds-pipelines-argo-argoexec@sha256:da1b0d502ae97160185ec5debc2f0c8d54f70b01be4ea4a9339d7137cc3918a9 +IMAGES_MLMDGRPC=quay.io/opendatahub/mlmd-grpc-server@sha256:9e905b2de2fb6801716a14ebd6e589cac82fef26741825d06717d695a37ff199 +IMAGES_MLMDENVOY=registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:02b834fd74da71ec37f6a5c0d10aac9a679d1a0f4e510c4f77723ef2367e858a 
+IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:3d30992e60774f887c4e7959c81b0c41b0d82d042250b3b56f05ab67fd4cdee1 +IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:4f8d66597feeb32bb18699326029f9a71a5aca4a57679d636b876377c2e95695 IMAGES_TOOLBOX=registry.redhat.io/ubi9/toolbox@sha256:da31dee8904a535d12689346e65e5b00d11a6179abf1fa69b548dbd755fa2770 IMAGES_RHELAI=registry.redhat.io/rhelai1/instructlab-nvidia-rhel9@sha256:05cfba1fb13ed54b1de4d021da2a31dd78ba7d8cc48e10c7fe372815899a18ae -IMAGES_MLMDENVOY=registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:b30d60cd458133430d4c92bf84911e03cecd02f60e88a58d1c6c003543cf833a -IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:f0ee0d27bb784e289f7d88cc8ee0e085ca70e88a5d126562105542f259a1ac01 -IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:8ce44de8c683f198bf24ba36cd17e89708153d11f5b42c0a27e77f8fdb233551 ZAP_LOG_LEVEL=info MAX_CONCURRENT_RECONCILES=10 DSPO_HEALTHCHECK_DATABASE_CONNECTIONTIMEOUT=15s DSPO_HEALTHCHECK_OBJECTSTORE_CONNECTIONTIMEOUT=15s DSPO_REQUEUE_TIME=20s DSPO_APISERVER_INCLUDE_OWNERREFERENCE=true -MANAGEDPIPELINES="{}" -PLATFORMVERSION="v0.0.0" From c12f4f4273a16f41518a92c8021f8337e18f2ab4 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 14 Feb 2025 15:39:02 -0500 Subject: [PATCH 14/14] update params to match latest configs Signed-off-by: Humair Khan --- config/base/params.env | 8 +++++--- scripts/release/params.py | 10 ++++++---- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/config/base/params.env b/config/base/params.env index 68d48283d..1a72d6500 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -8,9 +8,9 @@ IMAGES_PIPELINESRUNTIMEGENERIC=quay.io/opendatahub/ds-pipelines-runtime-generic@ IMAGES_ARGO_WORKFLOWCONTROLLER=quay.io/opendatahub/ds-pipelines-argo-workflowcontroller@sha256:995f06328569b558d63cf727c0674df71b1927f74ab60e966596ccb8c06e12f8 
IMAGES_ARGO_EXEC=quay.io/opendatahub/ds-pipelines-argo-argoexec@sha256:da1b0d502ae97160185ec5debc2f0c8d54f70b01be4ea4a9339d7137cc3918a9 IMAGES_MLMDGRPC=quay.io/opendatahub/mlmd-grpc-server@sha256:9e905b2de2fb6801716a14ebd6e589cac82fef26741825d06717d695a37ff199 -IMAGES_MLMDENVOY=registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:02b834fd74da71ec37f6a5c0d10aac9a679d1a0f4e510c4f77723ef2367e858a -IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:3d30992e60774f887c4e7959c81b0c41b0d82d042250b3b56f05ab67fd4cdee1 -IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:4f8d66597feeb32bb18699326029f9a71a5aca4a57679d636b876377c2e95695 +IMAGES_MLMDENVOY=registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:b30d60cd458133430d4c92bf84911e03cecd02f60e88a58d1c6c003543cf833a +IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:f0ee0d27bb784e289f7d88cc8ee0e085ca70e88a5d126562105542f259a1ac01 +IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:8ce44de8c683f198bf24ba36cd17e89708153d11f5b42c0a27e77f8fdb233551 IMAGES_TOOLBOX=registry.redhat.io/ubi9/toolbox@sha256:da31dee8904a535d12689346e65e5b00d11a6179abf1fa69b548dbd755fa2770 IMAGES_RHELAI=registry.redhat.io/rhelai1/instructlab-nvidia-rhel9@sha256:05cfba1fb13ed54b1de4d021da2a31dd78ba7d8cc48e10c7fe372815899a18ae ZAP_LOG_LEVEL=info @@ -19,3 +19,5 @@ DSPO_HEALTHCHECK_DATABASE_CONNECTIONTIMEOUT=15s DSPO_HEALTHCHECK_OBJECTSTORE_CONNECTIONTIMEOUT=15s DSPO_REQUEUE_TIME=20s DSPO_APISERVER_INCLUDE_OWNERREFERENCE=true +MANAGEDPIPELINES="{}" +PLATFORMVERSION="v0.0.0" diff --git a/scripts/release/params.py b/scripts/release/params.py index f75c62310..c97aec005 100644 --- a/scripts/release/params.py +++ b/scripts/release/params.py @@ -31,9 +31,9 @@ } STATIC_REPOS = { - "IMAGES_MLMDENVOY": "registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:02b834fd74da71ec37f6a5c0d10aac9a679d1a0f4e510c4f77723ef2367e858a", - "IMAGES_MARIADB": 
"registry.redhat.io/rhel8/mariadb-103@sha256:3d30992e60774f887c4e7959c81b0c41b0d82d042250b3b56f05ab67fd4cdee1", - "IMAGES_OAUTHPROXY": "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:4f8d66597feeb32bb18699326029f9a71a5aca4a57679d636b876377c2e95695", + "IMAGES_MLMDENVOY": "registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:b30d60cd458133430d4c92bf84911e03cecd02f60e88a58d1c6c003543cf833a", + "IMAGES_MARIADB": "registry.redhat.io/rhel8/mariadb-103@sha256:f0ee0d27bb784e289f7d88cc8ee0e085ca70e88a5d126562105542f259a1ac01", + "IMAGES_OAUTHPROXY": "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:8ce44de8c683f198bf24ba36cd17e89708153d11f5b42c0a27e77f8fdb233551", "IMAGES_TOOLBOX": "registry.redhat.io/ubi9/toolbox@sha256:da31dee8904a535d12689346e65e5b00d11a6179abf1fa69b548dbd755fa2770", "IMAGES_RHELAI": "registry.redhat.io/rhelai1/instructlab-nvidia-rhel9@sha256:05cfba1fb13ed54b1de4d021da2a31dd78ba7d8cc48e10c7fe372815899a18ae", } @@ -44,7 +44,9 @@ "DSPO_HEALTHCHECK_DATABASE_CONNECTIONTIMEOUT": "15s", "DSPO_HEALTHCHECK_OBJECTSTORE_CONNECTIONTIMEOUT": "15s", "DSPO_REQUEUE_TIME": "20s", - "DSPO_APISERVER_INCLUDE_OWNERREFERENCE": "true" + "DSPO_APISERVER_INCLUDE_OWNERREFERENCE": "true", + "MANAGEDPIPELINES": "\"{}\"", + "PLATFORMVERSION": "\"v0.0.0\"" }