Skip to content

Commit 4c01671

Browse files
authored
Deprecate GPU-related files, improve README, resolve CVEs (#6986)
1 parent 75d0c10 commit 4c01671

File tree

9 files changed

+514
-426
lines changed

9 files changed

+514
-426
lines changed

components/alibi-explain-server/Dockerfile

Lines changed: 9 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
ARG VERSION
22
ARG BASE_IMAGE
3-
FROM ${BASE_IMAGE}:${VERSION} as base
3+
FROM ${BASE_IMAGE}:${VERSION} AS base
44

55
ARG VERSION
66
LABEL name="Seldon Alibi Wrapper" \
@@ -10,7 +10,7 @@ LABEL name="Seldon Alibi Wrapper" \
1010
summary="Alibi Explainer Wrapper for Seldon Core" \
1111
description="Allows Seldon Core inference models to run with a black box model explanation model from the Alibi:Explain project"
1212

13-
FROM base as builder
13+
FROM base AS builder
1414

1515
USER root
1616

@@ -24,25 +24,21 @@ RUN mkdir microservice
2424
WORKDIR /microservice
2525

2626
# Install Poetry
27-
ENV POETRY_HOME /microservice/.poetry
27+
ENV POETRY_HOME=/microservice/.poetry
2828
RUN /opt/conda/bin/pip install --no-cache-dir "poetry==2.1.2"
2929

3030
# Replace vulnerable pip wheel embedded inside virtualenv (CVE-2025-8869)
31-
RUN find /opt/conda/lib/python3.12/site-packages/virtualenv/seed/wheels/embed/ -name "pip-*.whl" -delete && \
32-
/opt/conda/bin/pip wheel pip==25.3.0 --wheel-dir /opt/conda/lib/python3.12/site-packages/virtualenv/seed/wheels/embed/
31+
RUN PIP_WHEEL_DIR=/opt/conda/lib/python3.12/ensurepip/_bundled && \
32+
/opt/conda/bin/pip wheel "pip==25.3.0" "setuptools>=75.0.0,<76.0.0" "wheel>=0.44.0,<0.45.0" --wheel-dir $PIP_WHEEL_DIR && \
33+
rm -f $PIP_WHEEL_DIR/pip-25.0.1-py3-none-any.whl
3334

34-
ENV PATH "$POETRY_HOME/bin:$PATH"
35-
ENV POETRY_VIRTUALENVS_CREATE false
35+
ENV PATH="$POETRY_HOME/bin:$PATH"
36+
ENV POETRY_VIRTUALENVS_CREATE=false
3637

3738
# Install the dependencies only
3839
COPY poetry.lock pyproject.toml ./
39-
## Disable Poetry's new installer to avoid JSONDecodeError
40-
## https://github.com/python-poetry/poetry/issues/4210
41-
## NOTE: Removing explicitly requirements.txt file from subdeps test
42-
## dependencies causing false positives in Snyk.
4340
RUN poetry install --no-root
4441

45-
4642
# Add licences
4743
RUN mkdir /licenses
4844
RUN mkdir ./licenses && pip-licenses --from=mixed --format=csv --output-file=./licenses/license_info.csv && \
@@ -62,7 +58,7 @@ COPY README.md README.md
6258
# Install the project code
6359
RUN poetry install
6460

65-
FROM base as final
61+
FROM base AS final
6662
WORKDIR /microservice
6763

6864
RUN microdnf -y update

components/alibi-explain-server/Makefile

Lines changed: 4 additions & 64 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@ get_apis:
1818
$(MAKE) -C ${SELDON_CORE_DIR}/proto/tensorflow clean
1919

2020
build_apis: get_apis
21-
pip install -r requirements-apis.txt
2221
cd alibiexplainer && python \
2322
-m grpc.tools.protoc \
2423
-I./ \
@@ -49,15 +48,9 @@ fmt:
4948
docker-build:
5049
docker build --platform=linux/amd64 --file=Dockerfile --build-arg BASE_IMAGE=${BASE_IMAGE} --build-arg VERSION=${VERSION} -t ${IMAGE}:${VERSION} .
5150

52-
docker-build-gpu:
53-
docker build --file=Dockerfile.gpu -t ${IMAGE}-gpu:${VERSION} .
54-
5551
docker-push:
5652
docker push ${IMAGE}:${VERSION}
5753

58-
docker-push-gpu:
59-
docker push ${IMAGE}-gpu:${VERSION}
60-
6154
kind_load: docker-build
6255
kind load docker-image ${IMAGE}:${VERSION} --name ${KIND_NAME}
6356

@@ -78,7 +71,6 @@ clean:
7871
#
7972
# Test Tabular Explanations
8073
#
81-
8274
test_models/sklearn/iris:
8375
mkdir -p test_models/sklearn/iris
8476
gsutil cp -r gs://seldon-models/v1.11.0-dev/sklearn/iris test_models/sklearn
@@ -88,7 +80,7 @@ test_models/explainers/anchor_tabular:
8880
python tests/make_test_models.py --model anchor_tabular --model_dir test_models/explainers/anchor_tabular
8981

9082
anchor_tabular_model: test_models/sklearn/iris
91-
docker run -it --rm --name "sklearnserver" -p 9000:9000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/iris"}]' ${REPO}/sklearnserver:${VERSION}
83+
docker run -it --rm --name "sklearnserver" -p 9000:9000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/iris"}]' ${DOCKER_REGISTRY}/sklearnserver:${VERSION}
9284

9385
anchor_tabular_predict:
9486
curl -d '{"data": {"ndarray":[[5.964, 4.006, 2.081, 1.031]]}}' -X POST http://localhost:9000/api/v1.0/predictions -H "Content-Type: application/json"
@@ -102,19 +94,15 @@ anchor_tabular_docker: test_models/explainers/anchor_tabular
10294
anchor_tabular_explain:
10395
curl -d '{"data": {"ndarray":[[5.964, 4.006, 2.081, 1.031]]}}' -X POST http://localhost:8080/api/v1.0/explain -H "Content-Type: application/json"
10496

105-
10697
#
10798
# Test Text Explanations
10899
#
109-
110-
111100
test_models/sklearn/moviesentiment:
112101
mkdir -p test_models/sklearn
113102
gsutil cp -r gs://seldon-models/sklearn/moviesentiment_sklearn_0.24.2 test_models/sklearn
114103

115104
anchor_text_model: test_models/sklearn/moviesentiment
116-
docker run -it --rm --name "sklearnserver" -p 9000:9000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/moviesentiment_sklearn_0.24.2"}]' ${REPO}/sklearnserver:${VERSION}
117-
105+
docker run -it --rm --name "sklearnserver" -p 9000:9000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/moviesentiment_sklearn_0.24.2"}]' ${DOCKER_REGISTRY}/sklearnserver:${VERSION}
118106

119107
anchor_text_predict:
120108
curl -d '{"data": {"ndarray":["a visually exquisite but narratively opaque and emotionally vapid experience of style and mystification"]}}' -X POST http://localhost:9000/api/v1.0/predictions -H "Content-Type: application/json"
@@ -128,11 +116,9 @@ anchor_text_docker:
128116
anchor_text_explain:
129117
curl -d '{"data": {"ndarray":["a visually exquisite but narratively opaque and emotionally vapid experience of style and mystification"]}}' -X POST http://localhost:8080/api/v1.0/explain -H "Content-Type: application/json"
130118

131-
132119
#
133120
# Test Image Explanation
134121
#
135-
136122
test_models/tfserving/cifar10/resnet32:
137123
mkdir -p test_models/tfserving/cifar10
138124
gsutil cp -r gs://seldon-models/tfserving/cifar10/resnet32 test_models/tfserving/cifar10
@@ -144,11 +130,9 @@ test_models/explainers/anchor_image:
144130
anchor_images_model: test_models/tfserving/cifar10/resnet32
145131
docker run --name tfserver -it --rm -p 8501:8501 -p 8500:8500 -v "${PWD}/test_models/tfserving/cifar10:/models" -e MODEL_NAME=resnet32 tensorflow/serving
146132

147-
148133
anchor_images_predict:
149134
curl -d @./tests/data/input.json -X POST http://localhost:8501/v1/models/resnet32:predict -H "Content-Type: application/json"
150135

151-
152136
anchor_images: test_models/explainers/anchor_image
153137
python -m alibiexplainer --model_name resnet32 --protocol tensorflow.http --storage_uri "${PWD}/test_models/explainers/anchor_image" --predictor_host localhost:8501 AnchorImages
154138

@@ -158,12 +142,9 @@ anchor_images_docker: test_models/explainers/anchor_image
158142
anchor_images_explain:
159143
curl -d @./tests/data/input.json -X POST http://localhost:8080/v1/models/resnet32:explain -H "Content-Type: application/json"
160144

161-
162145
#
163146
# Test Kernel Shap Explanation
164147
#
165-
166-
167148
test_models/sklearn/wine/model-py36-0.23.2:
168149
mkdir -p test_models/sklearn/wine
169150
gsutil cp -r gs://seldon-models/sklearn/wine/model-py36-0.23.2 test_models/sklearn/wine
@@ -173,24 +154,20 @@ test_models/explainers/kernel_shap:
173154
python tests/make_test_models.py --model kernel_shap --model_dir test_models/explainers/kernel_shap
174155

175156
kernel_shap_model: test_models/sklearn/wine/model-py36-0.23.2
176-
docker run -it --rm --name "sklearnserver" -p 9000:9000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/wine/model-py36-0.23.2"},{"type":"STRING","name":"method","value":"decision_function"}]' ${REPO}/sklearnserver:${VERSION}
157+
docker run -it --rm --name "sklearnserver" -p 9000:9000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/wine/model-py36-0.23.2"},{"type":"STRING","name":"method","value":"decision_function"}]' ${DOCKER_REGISTRY}/sklearnserver:${VERSION}
177158

178159
kernel_shap_predict:
179160
curl -d '{"data": {"ndarray":[[-0.24226334, 0.26757916, 0.42085937, 0.7127641 , 0.84067236, -1.27747161, -0.60582812, -0.9706341 , -0.5873972 , 2.42611713, -2.06608025, -1.55017035, -0.86659858]]}}' -X POST http://localhost:9000/api/v1.0/predictions -H "Content-Type: application/json"
180161

181-
182162
kernel_shap: test_models/explainers/kernel_shap
183163
python -m alibiexplainer --model_name wine --protocol seldon.http --storage_uri "${PWD}/test_models/explainers/kernel_shap" --predictor_host localhost:9000 KernelShap
184164

185-
186165
kernel_shap_docker: test_models/explainers/kernel_shap
187166
docker run -it --rm --name "explainer" --network=host -p 8080:8080 -v ${PWD}/test_models:/models ${IMAGE}:${VERSION} --model_name wine --protocol seldon.http --storage_uri /models/explainers/kernel_shap --predictor_host localhost:9000 KernelShap
188167

189-
190168
kernel_shap_explain:
191169
curl -d '{"data": {"ndarray":[[-0.24226334, 0.26757916, 0.42085937, 0.7127641 , 0.84067236, -1.27747161, -0.60582812, -0.9706341 , -0.5873972 , 2.42611713, -2.06608025, -1.55017035, -0.86659858]]}}' -X POST http://localhost:8080/api/v1.0/explain -H "Content-Type: application/json"
192170

193-
194171
#
195172
# Test Integrated Gradients
196173
#
@@ -201,15 +178,12 @@ test_models/keras/imdb:
201178
integrated_gradients: test_models/keras/imdb
202179
python -m alibiexplainer --model_name imdb --protocol seldon.http --storage_uri ${PWD}/test_models/keras/imdb IntegratedGradients IntegratedGradients --layer 1
203180

204-
205181
integrated_gradients_docker: test_models/keras/imdb
206182
docker run -it --rm --name "explainer" --network=host -p 8080:8080 -v ${PWD}/test_models:/models ${IMAGE}:${VERSION} --model_name adult --protocol seldon.http --storage_uri /models/keras/imdb IntegratedGradients --layer 1
207183

208184
integrated_gradients_explain:
209185
curl -d '{"data": {"ndarray":[[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 591, 202, 14, 31, 6, 717, 10, 10, 2, 2, 5, 4, 360, 7, 4, 177, 5760, 394, 354, 4, 123, 9, 1035, 1035, 1035, 10, 10, 13, 92, 124, 89, 488, 7944, 100, 28, 1668, 14, 31, 23, 27, 7479, 29, 220, 468, 8, 124, 14, 286, 170, 8, 157, 46, 5, 27, 239, 16, 179, 2, 38, 32, 25, 7944, 451, 202, 14, 6, 717]]}}' -X POST http://localhost:8080/api/v1.0/explain -H "Content-Type: application/json"
210186

211-
212-
213187
#
214188
# Test Tree Shap
215189
# This is an odd case where we do not need a separate model (yet)
@@ -240,50 +214,16 @@ test_models/explainers/ale:
240214
python tests/make_test_models.py --model ale --model_dir test_models/explainers/ale
241215

242216
ale_model: test_models/sklearn/iris-0.23.2/lr_model
243-
docker run -it --rm --name "sklearnserver" -p 9000:9000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/iris-0.23.2/lr_model"},{"type":"STRING","name":"method","value":"decision_function"}]' ${REPO}/sklearnserver:${VERSION}
217+
docker run -it --rm --name "sklearnserver" -p 9000:9000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/iris-0.23.2/lr_model"},{"type":"STRING","name":"method","value":"decision_function"}]' ${DOCKER_REGISTRY}/sklearnserver:${VERSION}
244218

245219
ale_predict:
246220
curl -d '{"data": {"ndarray":[[6.1, 2.8, 4.7, 1.2]]}}' -X POST http://localhost:9000/api/v1.0/predictions -H "Content-Type: application/json"
247221

248-
249222
ale: test_models/explainers/ale
250223
python -m alibiexplainer --model_name iris --protocol seldon.http --storage_uri "${PWD}/test_models/explainers/ale" --predictor_host localhost:9000 ALE
251224

252-
253225
ale_docker: test_models/explainers/ale
254226
docker run -it --rm --name "explainer" --network=host -p 8080:8080 -v ${PWD}/test_models:/models ${IMAGE}:${VERSION} --model_name iris --protocol seldon.http --storage_uri /models/explainers/ale --predictor_host localhost:9000 ALE
255227

256-
257228
ale_explain:
258229
curl -d '{"data": {"ndarray":[[6.1, 2.8, 4.7, 1.2]]}}' -X POST http://localhost:8080/api/v1.0/explain -H "Content-Type: application/json"
259-
260-
261-
262-
#
263-
# Test Triton Cifar10
264-
#
265-
# TODO: move the below in relevant place
266-
# as we are moving to mlserver alibi runtime for V2 protocol
267-
268-
#test_models/triton/cifar10/tf_cifar10:
269-
# mkdir -p test_models/triton/tf_cifar10
270-
# gsutil cp -r gs://seldon-models/triton/tf_cifar10 test_models/triton
271-
#
272-
#
273-
#anchor_images_triton_model: test_models/triton/cifar10/tf_cifar10
274-
# docker run --rm --shm-size=1g --ulimit memlock=-1 --ulimit stack=67108864 -p9000:9000 -p8001:8001 -p8002:8002 -p5001:5001 -v ${PWD}/test_models/triton/tf_cifar10:/models nvcr.io/nvidia/tritonserver:21.08-py3 /opt/tritonserver/bin/tritonserver --model-repository=/models --http-port=9000 --grpc-port=5001
275-
#
276-
#anchor_images_triton_predict:
277-
# curl -H "Content-Type: application/json" http://0.0.0.0:9000/v2/models/cifar10/infer -d '@tests/data/truck-v2.json'
278-
#
279-
#
280-
#anchor_images_triton: test_models/explainers/anchor_image
281-
# python -m alibiexplainer --model_name cifar10 --protocol kfserving.http --storage_uri ${PWD}/explainers/anchor_image --predictor_host localhost:9000 AnchorImages
282-
#
283-
#
284-
#anchor_images_triton_docker: test_models/explainers/anchor_image
285-
# docker run -it --rm --name "explainer" --network=host -p 8080:8080 -v ${PWD}/test_models:/models ${IMAGE}:${VERSION} --model_name cifar10 --protocol kfserving.http --storage_uri /models/explainers/anchor_image --predictor_host localhost:9000 AnchorImages
286-
#
287-
#
288-
#anchor_images_triton_explain:
289-
# curl -d @tests/data/truck-v2.json -X POST http://localhost:8080/v2/models/cifar10/explain -H "Content-Type: application/json"

0 commit comments

Comments (0)