Skip to content

Commit d4adcb9

Browse files
committed
Merge branch '2024b' of github.com:opendatahub-io/notebooks into sync-24b-downstream
2 parents 5468f8b + ec54d80 commit d4adcb9

File tree

185 files changed

+84775
-55583
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

185 files changed

+84775
-55583
lines changed

.github/workflows/build-notebooks-TEMPLATE.yaml

Lines changed: 50 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,10 @@ name: Build & Publish Notebook Servers (TEMPLATE)
1818

1919
jobs:
2020
build:
21-
runs-on: ubuntu-22.04
21+
strategy:
22+
matrix:
23+
os: [ubuntu-22.04]
24+
runs-on: ${{matrix.os}}
2225
env:
2326
# Some pieces of code (image pulls for example) in podman consult TMPDIR or default to /var/tmp
2427
TMPDIR: /home/runner/.local/share/containers/tmpdir
@@ -34,6 +37,8 @@ jobs:
3437
TRIVY_VULNDB: "/home/runner/.local/share/containers/trivy_db"
3538
# Targets (and their folder) that should be scanned using FS instead of IMAGE scan due to resource constraints
3639
TRIVY_SCAN_FS_JSON: '{}'
40+
# Poetry version for use in running tests
41+
POETRY_VERSION: '2.0.0'
3742

3843
steps:
3944

@@ -57,7 +62,7 @@ jobs:
5762

5863
- name: Free up additional disk space
5964
# https://docs.github.com/en/actions/learn-github-actions/expressions
60-
if: "${{ contains(inputs.target, 'rocm') || contains(inputs.target, 'cuda') || contains(inputs.target, 'intel') ||
65+
if: "${{ contains(inputs.target, 'rocm') || contains(inputs.target, 'cuda') ||
6166
contains(inputs.target, 'pytorch') || contains(inputs.target, 'tensorflow') }}"
6267
run: |
6368
set -x
@@ -232,17 +237,19 @@ jobs:
232237
# region Image build
233238

234239
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#push
235-
- name: "push|schedule: make ${{ inputs.target }}"
240+
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
241+
242+
- name: "push|schedule|workflow_dispatch: make ${{ inputs.target }}"
236243
run: |
237244
make ${{ inputs.target }}
238-
if: ${{ fromJson(inputs.github).event_name == 'push' || fromJson(inputs.github).event_name == 'schedule' }}
245+
if: ${{ fromJson(inputs.github).event_name == 'push' ||
246+
fromJson(inputs.github).event_name == 'schedule' ||
247+
fromJson(inputs.github).event_name == 'workflow_dispatch' }}
239248
env:
240249
IMAGE_TAG: "${{ steps.calculated_vars.outputs.IMAGE_TAG }}"
241250
CONTAINER_BUILD_CACHE_ARGS: "--cache-from ${{ env.CACHE }} --cache-to ${{ env.CACHE }}"
242251
# dependent images were already built and pushed, so just let podman pull it
243252
BUILD_DEPENDENT_IMAGES: "no"
244-
245-
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
246253
- name: "pull_request: make ${{ inputs.target }}"
247254
run: |
248255
make ${{ inputs.target }}
@@ -258,6 +265,43 @@ jobs:
258265

259266
# endregion
260267

268+
# region Pytest image tests
269+
270+
- name: Install poetry
271+
if: steps.cache-poetry-restore.outputs.cache-hit != 'true'
272+
run: pipx install poetry==${{ env.POETRY_VERSION }}
273+
env:
274+
PIPX_HOME: /home/runner/.local/pipx
275+
PIPX_BIN_DIR: /home/runner/.local/bin
276+
277+
- name: Check poetry is installed correctly
278+
run: poetry env info
279+
280+
- name: Set up Python
281+
id: setup-python
282+
uses: actions/setup-python@v5
283+
with:
284+
python-version: '3.12'
285+
cache: 'poetry'
286+
287+
- name: Configure poetry
288+
run: poetry env use "${{ steps.setup-python.outputs.python-path }}"
289+
290+
- name: Install deps
291+
run: poetry install --sync
292+
293+
- name: Run container tests (in PyTest)
294+
run: |
295+
set -Eeuxo pipefail
296+
poetry run pytest --capture=fd tests/containers --image="${{ steps.calculated_vars.outputs.OUTPUT_IMAGE }}"
297+
env:
298+
DOCKER_HOST: "unix:///var/run/podman/podman.sock"
299+
TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE: "/var/run/podman/podman.sock"
300+
# pulling the Ryuk container from docker.io introduces CI flakiness
301+
TESTCONTAINERS_RYUK_DISABLED: "true"
302+
303+
# endregion Pytest image tests
304+
261305
# region Makefile image tests
262306

263307
- name: "Check if we have tests or not"

.github/workflows/build-notebooks.yaml

Lines changed: 0 additions & 77 deletions
Original file line numberDiff line numberDiff line change
@@ -167,83 +167,6 @@
167167
},
168168
"secrets": "inherit"
169169
},
170-
"intel-base-gpu-ubi9-python-3_11": {
171-
"needs": [
172-
"base-ubi9-python-3_11"
173-
],
174-
"uses": "./.github/workflows/build-notebooks-TEMPLATE.yaml",
175-
"with": {
176-
"target": "intel-base-gpu-ubi9-python-3.11",
177-
"github": "${{ toJSON(github) }}"
178-
},
179-
"secrets": "inherit"
180-
},
181-
"intel-runtime-tensorflow-ubi9-python-3_11": {
182-
"needs": [
183-
"intel-base-gpu-ubi9-python-3_11"
184-
],
185-
"uses": "./.github/workflows/build-notebooks-TEMPLATE.yaml",
186-
"with": {
187-
"target": "intel-runtime-tensorflow-ubi9-python-3.11",
188-
"github": "${{ toJSON(github) }}"
189-
},
190-
"secrets": "inherit"
191-
},
192-
"jupyter-intel-tensorflow-ubi9-python-3_11": {
193-
"needs": [
194-
"intel-base-gpu-ubi9-python-3_11"
195-
],
196-
"uses": "./.github/workflows/build-notebooks-TEMPLATE.yaml",
197-
"with": {
198-
"target": "jupyter-intel-tensorflow-ubi9-python-3.11",
199-
"github": "${{ toJSON(github) }}"
200-
},
201-
"secrets": "inherit"
202-
},
203-
"intel-runtime-pytorch-ubi9-python-3_11": {
204-
"needs": [
205-
"intel-base-gpu-ubi9-python-3_11"
206-
],
207-
"uses": "./.github/workflows/build-notebooks-TEMPLATE.yaml",
208-
"with": {
209-
"target": "intel-runtime-pytorch-ubi9-python-3.11",
210-
"github": "${{ toJSON(github) }}"
211-
},
212-
"secrets": "inherit"
213-
},
214-
"jupyter-intel-pytorch-ubi9-python-3_11": {
215-
"needs": [
216-
"intel-base-gpu-ubi9-python-3_11"
217-
],
218-
"uses": "./.github/workflows/build-notebooks-TEMPLATE.yaml",
219-
"with": {
220-
"target": "jupyter-intel-pytorch-ubi9-python-3.11",
221-
"github": "${{ toJSON(github) }}"
222-
},
223-
"secrets": "inherit"
224-
},
225-
"intel-runtime-ml-ubi9-python-3_11": {
226-
"needs": [
227-
"base-ubi9-python-3_11"
228-
],
229-
"uses": "./.github/workflows/build-notebooks-TEMPLATE.yaml",
230-
"with": {
231-
"target": "intel-runtime-ml-ubi9-python-3.11",
232-
"github": "${{ toJSON(github) }}"
233-
},
234-
"secrets": "inherit"
235-
},
236-
"jupyter-intel-ml-ubi9-python-3_11": {
237-
"needs": [
238-
"base-ubi9-python-3_11"
239-
],
240-
"uses": "./.github/workflows/build-notebooks-TEMPLATE.yaml",
241-
"with": {
242-
"target": "jupyter-intel-ml-ubi9-python-3.11",
243-
"github": "${{ toJSON(github) }}"
244-
},
245-
"secrets": "inherit"
246-
},
247170
"base-c9s-python-3_11": {
248171
"needs": [],
249172
"uses": "./.github/workflows/build-notebooks-TEMPLATE.yaml",

.github/workflows/pr-merge-image-delete.yml

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -78,13 +78,6 @@ jobs:
7878
skopeo delete docker://${QUAY_IMAGE_REPO}:codeserver-ubi9-python-3.11-pr-${{ env.PR }}
7979
skopeo delete docker://${QUAY_IMAGE_REPO}:rstudio-c9s-python-3.11-pr-${{ env.PR }}
8080
skopeo delete docker://${QUAY_IMAGE_REPO}:cuda-rstudio-c9s-python-3.11-pr-${{ env.PR }}
81-
skopeo delete docker://${QUAY_IMAGE_REPO}:intel-base-gpu-ubi9-python-3.11-pr-${{ env.PR }}
82-
skopeo delete docker://${QUAY_IMAGE_REPO}:intel-runtime-tensorflow-ubi9-python-3.11-pr-${{ env.PR }}
83-
skopeo delete docker://${QUAY_IMAGE_REPO}:intel-runtime-pytorch-ubi9-python-3.11-pr-${{ env.PR }}
84-
skopeo delete docker://${QUAY_IMAGE_REPO}:intel-runtime-ml-ubi9-python-3.11-pr-${{ env.PR }}
85-
skopeo delete docker://${QUAY_IMAGE_REPO}:jupyter-intel-tensorflow-ubi9-python-3.11-pr-${{ env.PR }}
86-
skopeo delete docker://${QUAY_IMAGE_REPO}:jupyter-intel-pytorch-ubi9-python-3.11-pr-${{ env.PR }}
87-
skopeo delete docker://${QUAY_IMAGE_REPO}:jupyter-intel-ml-ubi9-python-3.11-pr-${{ env.PR }}
8881
skopeo delete docker://${QUAY_IMAGE_REPO}:rocm-ubi9-python-3.11-pr-${{ env.PR }}
8982
skopeo delete docker://${QUAY_IMAGE_REPO}:rocm-jupyter-minimal-ubi9-python-3.11-pr-${{ env.PR }}
9083
skopeo delete docker://${QUAY_IMAGE_REPO}:rocm-jupyter-datascience-ubi9-python-3.11-pr-${{ env.PR }}

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -128,6 +128,7 @@ venv/
128128
ENV/
129129
env.bak/
130130
venv.bak/
131+
.DS_store
131132

132133
# Spyder project settings
133134
.spyderproject

Makefile

Lines changed: 16 additions & 102 deletions
Original file line numberDiff line numberDiff line change
@@ -28,13 +28,15 @@ BUILD_DEPENDENT_IMAGES ?= yes
2828
PUSH_IMAGES ?= yes
2929

3030
# OS dependant: Generate date, select appropriate cmd to locate container engine
31-
ifeq ($(OS), Windows_NT)
32-
DATE ?= $(shell powershell -Command "Get-Date -Format 'yyyyMMdd'")
33-
WHERE_WHICH ?= where
34-
else
35-
DATE ?= $(shell date +'%Y%m%d')
36-
WHERE_WHICH ?= which
31+
ifdef OS
32+
ifeq ($(OS), Windows_NT)
33+
DATE ?= $(shell powershell -Command "Get-Date -Format 'yyyyMMdd'")
34+
WHERE_WHICH ?= where
35+
endif
3736
endif
37+
DATE ?= $(shell date +'%Y%m%d')
38+
WHERE_WHICH ?= which
39+
3840

3941
# linux/amd64 or darwin/arm64
4042
OS_ARCH=$(shell go env GOOS)/$(shell go env GOARCH)
@@ -186,41 +188,6 @@ runtime-cuda-tensorflow-ubi9-python-3.11: cuda-ubi9-python-3.11
186188
codeserver-ubi9-python-3.11: base-ubi9-python-3.11
187189
$(call image,$@,codeserver/ubi9-python-3.11,$<)
188190

189-
# Build and push base-anaconda-python-3.11-intel-gpu image to the registry
190-
.PHONY: intel-base-gpu-ubi9-python-3.11
191-
intel-base-gpu-ubi9-python-3.11: base-ubi9-python-3.11
192-
$(call image,$@,intel/base/gpu/ubi9-python-3.11,$<)
193-
194-
# Build and push intel-runtime-tensorflow-ubi9-python-3.11 image to the registry
195-
.PHONY: intel-runtime-tensorflow-ubi9-python-3.11
196-
intel-runtime-tensorflow-ubi9-python-3.11: intel-base-gpu-ubi9-python-3.11
197-
$(call image,$@,intel/runtimes/tensorflow/ubi9-python-3.11,$<)
198-
199-
# Build and push jupyter-intel-tensorflow-ubi9-python-3.11 image to the registry
200-
.PHONY: jupyter-intel-tensorflow-ubi9-python-3.11
201-
jupyter-intel-tensorflow-ubi9-python-3.11: intel-base-gpu-ubi9-python-3.11
202-
$(call image,$@,jupyter/intel/tensorflow/ubi9-python-3.11,$<)
203-
204-
# Build and push intel-runtime-pytorch-ubi9-python-3.11 image to the registry
205-
.PHONY: intel-runtime-pytorch-ubi9-python-3.11
206-
intel-runtime-pytorch-ubi9-python-3.11: intel-base-gpu-ubi9-python-3.11
207-
$(call image,$@,intel/runtimes/pytorch/ubi9-python-3.11,$<)
208-
209-
# Build and push jupyter-intel-pytorch-ubi9-python-3.11 image to the registry
210-
.PHONY: jupyter-intel-pytorch-ubi9-python-3.11
211-
jupyter-intel-pytorch-ubi9-python-3.11: intel-base-gpu-ubi9-python-3.11
212-
$(call image,$@,jupyter/intel/pytorch/ubi9-python-3.11,$<)
213-
214-
# Build and push intel-runtime-ml-ubi9-python-3.11 image to the registry
215-
.PHONY: intel-runtime-ml-ubi9-python-3.11
216-
intel-runtime-ml-ubi9-python-3.11: base-ubi9-python-3.11
217-
$(call image,$@,intel/runtimes/ml/ubi9-python-3.11,$<)
218-
219-
# Build and push jupyter-intel-ml-ubi9-python-3.11 image to the registry
220-
.PHONY: jupyter-intel-ml-ubi9-python-3.11
221-
jupyter-intel-ml-ubi9-python-3.11: base-ubi9-python-3.11
222-
$(call image,$@,jupyter/intel/ml/ubi9-python-3.11,$<)
223-
224191
####################################### Buildchain for Python 3.11 using C9S #######################################
225192

226193
# Build and push base-c9s-python-3.11 image to the registry
@@ -340,64 +307,11 @@ undeploy-c9s-%: bin/kubectl
340307
$(info # Undeploying notebook from $(NOTEBOOK_DIR) directory...)
341308
$(KUBECTL_BIN) delete -k $(NOTEBOOK_DIR)
342309

343-
# Function for testing a notebook with papermill
344-
# ARG 1: Notebook name
345-
# ARG 2: UBI flavor
346-
# ARG 3: Python kernel
347-
define test_with_papermill
348-
$(eval PREFIX_NAME := $(subst /,-,$(1)_$(2)))
349-
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "python3 -m pip install papermill"
350-
if ! $(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "wget ${NOTEBOOK_REPO_BRANCH_BASE}/jupyter/$(1)/$(2)-$(3)/test/test_notebook.ipynb -O test_notebook.ipynb && python3 -m papermill test_notebook.ipynb $(PREFIX_NAME)_output.ipynb --kernel python3 --stderr-file $(PREFIX_NAME)_error.txt" ; then
351-
echo "ERROR: The $(1) $(2) notebook encountered a failure. To investigate the issue, you can review the logs located in the ocp-ci cluster on 'artifacts/notebooks-e2e-tests/jupyter-$(1)-$(2)-$(3)-test-e2e' directory or run 'cat $(PREFIX_NAME)_error.txt' within your container. The make process has been aborted."
352-
exit 1
353-
fi
354-
if $(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "cat $(PREFIX_NAME)_error.txt | grep --quiet FAILED" ; then
355-
echo "ERROR: The $(1) $(2) notebook encountered a failure. The make process has been aborted."
356-
$(KUBECTL_BIN) exec $(FULL_NOTEBOOK_NAME) -- /bin/sh -c "cat $(PREFIX_NAME)_error.txt"
357-
exit 1
358-
fi
359-
endef
360-
361310
# Verify the notebook's readiness by pinging the /api endpoint and executing the corresponding test_notebook.ipynb file in accordance with the build chain logic.
362311
.PHONY: test
363312
test-%: bin/kubectl
364-
# Verify the notebook's readiness by pinging the /api endpoint
365-
$(eval NOTEBOOK_NAME := $(subst .,-,$(subst cuda-,,$*)))
366-
$(eval PYTHON_VERSION := $(shell echo $* | sed 's/.*-python-//'))
367-
$(info # Running tests for $(NOTEBOOK_NAME) notebook...)
368-
$(KUBECTL_BIN) wait --for=condition=ready pod -l app=$(NOTEBOOK_NAME) --timeout=600s
369-
$(KUBECTL_BIN) port-forward svc/$(NOTEBOOK_NAME)-notebook 8888:8888 & curl --retry 5 --retry-delay 5 --retry-connrefused http://localhost:8888/notebook/opendatahub/jovyan/api ; EXIT_CODE=$$?; echo && pkill --full "^$(KUBECTL_BIN).*port-forward.*"
370-
$(eval FULL_NOTEBOOK_NAME = $(shell ($(KUBECTL_BIN) get pods -l app=$(NOTEBOOK_NAME) -o custom-columns=":metadata.name" | tr -d '\n')))
371-
372-
# Tests notebook's functionalities
373-
if echo "$(FULL_NOTEBOOK_NAME)" | grep -q "minimal-ubi9"; then
374-
$(call test_with_papermill,minimal,ubi9,python-$(PYTHON_VERSION))
375-
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "intel-tensorflow-ubi9"; then
376-
$(call test_with_papermill,intel/tensorflow,ubi9,python-$(PYTHON_VERSION))
377-
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "intel-pytorch-ubi9"; then
378-
$(call test_with_papermill,intel/pytorch,ubi9,python-$(PYTHON_VERSION))
379-
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "datascience-ubi9"; then
380-
$(MAKE) validate-ubi9-datascience PYTHON_VERSION=$(PYTHON_VERSION) -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME)
381-
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "pytorch-ubi9"; then
382-
$(MAKE) validate-ubi9-datascience PYTHON_VERSION=$(PYTHON_VERSION) -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME)
383-
$(call test_with_papermill,pytorch,ubi9,python-$(PYTHON_VERSION))
384-
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "tensorflow-ubi9"; then
385-
$(MAKE) validate-ubi9-datascience PYTHON_VERSION=$(PYTHON_VERSION) -e FULL_NOTEBOOK_NAME=$(FULL_NOTEBOOK_NAME)
386-
$(call test_with_papermill,tensorflow,ubi9,python-$(PYTHON_VERSION))
387-
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "intel-ml-ubi9"; then
388-
$(call test_with_papermill,intel/ml,ubi9,python-$(PYTHON_VERSION))
389-
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "trustyai-ubi9"; then
390-
$(call test_with_papermill,trustyai,ubi9,python-$(PYTHON_VERSION))
391-
elif echo "$(FULL_NOTEBOOK_NAME)" | grep -q "anaconda"; then
392-
echo "There is no test notebook implemented yet for Anaconda Notebook...."
393-
else
394-
echo "No matching condition found for $(FULL_NOTEBOOK_NAME)."
395-
fi
396-
397-
.PHONY: validate-ubi9-datascience
398-
validate-ubi9-datascience:
399-
$(call test_with_papermill,minimal,ubi9,python-$(PYTHON_VERSION))
400-
$(call test_with_papermill,datascience,ubi9,python-$(PYTHON_VERSION))
313+
$(info # Running tests for $* notebook...)
314+
@./scripts/test_jupyter_with_papermill.sh $*
401315

402316
# Validate that runtime image meets minimum criteria
403317
# This validation is created from subset of https://github.com/elyra-ai/elyra/blob/9c417d2adc9d9f972de5f98fd37f6945e0357ab9/Makefile#L325
@@ -520,12 +434,7 @@ BASE_DIRS := base/c9s-python-$(PYTHON_VERSION) \
520434
# Default value is false, can be overridden
521435
# The below directories are not supported on tier-1
522436
INCLUDE_OPT_DIRS ?= false
523-
OPT_DIRS := jupyter/intel/ml/ubi9-python-$(PYTHON_VERSION) \
524-
jupyter/intel/pytorch/ubi9-python-$(PYTHON_VERSION) \
525-
jupyter/intel/tensorflow/ubi9-python-$(PYTHON_VERSION) \
526-
intel/runtimes/ml/ubi9-python-$(PYTHON_VERSION) \
527-
intel/runtimes/pytorch/ubi9-python-$(PYTHON_VERSION) \
528-
intel/runtimes/tensorflow/ubi9-python-$(PYTHON_VERSION)
437+
OPT_DIRS :=
529438

530439
# This recipe gets args, can be used like
531440
# make refresh-pipfilelock-files PYTHON_VERSION=3.11 INCLUDE_OPT_DIRS=false
@@ -554,6 +463,11 @@ refresh-pipfilelock-files:
554463
fi
555464
done
556465

466+
echo "Regenerating requirements.txt files"
467+
pushd $(ROOT_DIR)
468+
bash $(ROOT_DIR)/scripts/sync-requirements-txt.sh
469+
popd
470+
557471
# This is only for the workflow action
558472
# For running manually, set the required environment variables
559473
.PHONY: scan-image-vulnerabilities

0 commit comments

Comments
 (0)