Skip to content
Open
Show file tree
Hide file tree
Changes from 63 commits
Commits
Show all changes
71 commits
Select commit Hold shift + click to select a range
86b224e
Logistic Regression-patch2
darksapien23151 Jun 10, 2025
8921c90
Merge branch 'main' into logreg-patch1
darksapien23151 Jun 13, 2025
297909d
Update extras/iris_pipeline_project/model_config.json
darksapien23151 Jun 26, 2025
620ae60
Update extras/iris_pipeline_project/pipeline/ovmsmodel.py
darksapien23151 Jun 26, 2025
e5af86b
Update model_config.json
darksapien23151 Jun 26, 2025
96c1953
Update Dockerfile
darksapien23151 Jun 26, 2025
e64e7a1
Update graph.pbtxt
darksapien23151 Jun 26, 2025
5e1439e
Update ovmsmodel.py
darksapien23151 Jun 26, 2025
930aaa8
Create .gitignore
darksapien23151 Jun 27, 2025
ca366ca
Delete extras/iris_pipeline_project/data directory
darksapien23151 Jun 27, 2025
8a3097d
Update .gitignore
darksapien23151 Jun 28, 2025
3cdf97c
Add files via upload
darksapien23151 Jun 28, 2025
f938740
Removed data folder
hvssmanne Jun 28, 2025
b28a63e
Create README.md
darksapien23151 Jul 1, 2025
e7a6f1f
Update client_train.py
darksapien23151 Jul 1, 2025
d70052e
Update ovmsmodel.py
darksapien23151 Jul 3, 2025
0bf9bee
Update ovmsmodel.py
darksapien23151 Jul 3, 2025
1053f5f
Update README.md
darksapien23151 Jul 3, 2025
860a889
Update Dockerfile
darksapien23151 Jul 3, 2025
f56008b
Update .gitignore
darksapien23151 Jul 6, 2025
dc5f52e
Update client_train.py
darksapien23151 Jul 7, 2025
66d5966
Update client_inference.py
darksapien23151 Jul 7, 2025
0b6cbc0
Update ovmsmodel.py
darksapien23151 Jul 7, 2025
daaf522
Update ovmsmodel.py
darksapien23151 Jul 8, 2025
3dd60cc
Modified files and removed model/
hvssmanne Jul 9, 2025
a783d99
Merge branch 'openvinotoolkit:main' into logreg-patch1
darksapien23151 Jul 9, 2025
59031cf
Merge remote-tracking branch 'origin/logreg-patch1' and resolve confl…
hvssmanne Jul 9, 2025
c49bf22
Update README.md
darksapien23151 Jul 9, 2025
a31c177
Update README.md
darksapien23151 Jul 9, 2025
59a66a0
Update README.md
darksapien23151 Jul 9, 2025
b1e6ad0
Update README.md
darksapien23151 Jul 9, 2025
36689ab
Create data_preprocess.py
darksapien23151 Jul 9, 2025
f2049be
Update .gitignore
darksapien23151 Jul 9, 2025
ffcb534
Update ovmsmodel.py
darksapien23151 Jul 9, 2025
7c00c27
Update ovmsmodel.py
darksapien23151 Jul 13, 2025
bc42ce7
Update Dockerfile
darksapien23151 Jul 13, 2025
f102453
Update ovmsmodel.py
darksapien23151 Jul 13, 2025
82d5f0c
Update client_train.py
darksapien23151 Jul 13, 2025
8821147
Add files via upload
darksapien23151 Jul 13, 2025
a614ae5
Update extras/iris_pipeline_project/Dockerfile
darksapien23151 Jul 15, 2025
49ee303
Update .gitignore
darksapien23151 Jul 15, 2025
1f1b8b4
Update ovmsmodel.py
darksapien23151 Jul 15, 2025
b61cbe1
Update data_preprocess.py
darksapien23151 Jul 15, 2025
4c68c8b
Update data_preprocess.py
darksapien23151 Jul 15, 2025
53a79b1
Merge branch 'main' into logreg-patch1
darksapien23151 Jul 15, 2025
60db5be
Update client_inference.py
darksapien23151 Jul 15, 2025
c16a94c
Update client_train.py
darksapien23151 Jul 15, 2025
c2174e9
Update README.md
darksapien23151 Jul 15, 2025
1a21d72
Update README.md
darksapien23151 Jul 15, 2025
17f77b7
Update README.md
darksapien23151 Jul 15, 2025
d0b01cf
Update Dockerfile
darksapien23151 Jul 16, 2025
43b985e
Update client_train.py
darksapien23151 Jul 23, 2025
f616db7
Update ovmsmodel.py
darksapien23151 Jul 23, 2025
701ef29
Create labelmap.json
darksapien23151 Jul 23, 2025
b76cbda
Update model_config.json
darksapien23151 Jul 23, 2025
6737047
Update client_inference.py
darksapien23151 Jul 26, 2025
7845c64
Update ovmsmodel.py
darksapien23151 Jul 26, 2025
e1dd8cb
Update Dockerfile
darksapien23151 Jul 28, 2025
2c28d8b
Update Dockerfile
darksapien23151 Aug 15, 2025
35aada3
Create model.py
darksapien23151 Aug 15, 2025
4b9784b
Update ovmsmodel.py
darksapien23151 Aug 15, 2025
0c1202f
Add hyperparameters
hvssmanne Aug 30, 2025
11e3673
Add KMeans support
hvssmanne Aug 30, 2025
02915c8
Update README.md
darksapien23151 Sep 1, 2025
bd2e780
Update README.md
darksapien23151 Sep 1, 2025
e566762
Update README.md
darksapien23151 Sep 1, 2025
79e0d3b
Update extras/iris_pipeline_project/pipeline/ovmsmodel.py
darksapien23151 Sep 1, 2025
95447f3
Update extras/iris_pipeline_project/pipeline/model.py
darksapien23151 Sep 1, 2025
b7db421
Update extras/iris_pipeline_project/client/client_inference.py
darksapien23151 Sep 1, 2025
040f0c5
Update extras/iris_pipeline_project/client/client_inference.py
darksapien23151 Sep 1, 2025
9f7d3a0
Update README.md
darksapien23151 Sep 1, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions extras/iris_pipeline_project/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
data/
model/
23 changes: 23 additions & 0 deletions extras/iris_pipeline_project/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Extend the OVMS base image with the Python packages required by the
# iris pipeline's custom Python node (training + inference via Mediapipe graph).
FROM openvino/model_server:latest

# Root is required for apt-get and for patching /ovms/lib below.
USER root

# Expose OVMS's bundled shared libraries and Python bindings to python3.
ENV LD_LIBRARY_PATH=/ovms/lib
ENV PYTHONPATH=/ovms/lib/python

RUN apt-get update && apt-get install -y python3-pip
# --break-system-packages: the base image's Python is externally managed (PEP 668),
# so pip refuses system-wide installs without this flag.
RUN pip install --no-cache-dir --break-system-packages \
    pandas numpy scikit-learn joblib skl2onnx onnx onnxruntime \
    scikit-learn-intelex==2025.7.0 \
    tritonclient[all]
# CPU-only PyTorch wheels plus Intel's extension; pulled from their dedicated
# indexes rather than PyPI.
RUN python3 -m pip install --no-cache-dir --break-system-packages \
    torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu && \
    python3 -m pip install --no-cache-dir --break-system-packages \
    intel-extension-for-pytorch oneccl_bind_pt \
    --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/cpu/us/

# Replace OVMS's bundled TBB with the one installed above so the Python
# packages and OVMS load the same libtbb — NOTE(review): assumes pip placed
# libtbb under /usr/local/lib; verify if the build breaks here.
RUN rm /ovms/lib/libtbb.so* && cp /usr/local/lib/libtbb.so* /ovms/lib/

ENTRYPOINT ["/ovms/bin/ovms"]




126 changes: 126 additions & 0 deletions extras/iris_pipeline_project/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
# OVMS Iris Pipeline Example

This repository demonstrates how to use OpenVINO Model Server (OVMS) with a custom Mediapipe pipeline for the Iris dataset, including both model training and inference through a Python client.

---

## Step 1: Clone the Repository

```bash
git clone https://github.com/openvinotoolkit/model_server.git
cd model_server/extras/iris_pipeline_project
```
---


## Step 2: Build and Run OVMS Docker Image

### 2.1. Build the Docker Image

```bash
docker build --no-cache -t iris_logisticreg_ovms .
```

### 2.2. Run the OVMS Container

```bash
docker run --rm -it -v "$PWD:/workspace" -p 9000:9000 -p 8000:8000 iris_logisticreg_ovms --config_path /workspace/model_config.json --port 9000 --rest_port 8000
```
- **Note:** Adjust `$PWD` if you are running from a different working directory.

---

## Step 3: Project Structure

```
client/
├── client_inference.py
└── client_train.py
pipeline/
├── __pycache__/
├── graph.pbtxt
└── ovmsmodel.py
Dockerfile
model_config.json
```

---

## Step 4: Run Training and Inference

### 4.1. Training

```bash
python client/client_train.py train <path_to_training_dataset>
```

### 4.2. Inference

```bash
python client/client_inference.py infer <path_to_test_dataset>
```

---

## Instructions for preparing the data
Run the following command to download the Iris dataset, the classic "hello world" of classification datasets.

```bash
curl -o iris.csv https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data
```
Run the following file to prepare the data and split it into data for training and for inferencing.

```bash
python data_preprocess.py <path to output_dir>
```

## Input Format

The pipeline expects input as a JSON object, sent as a single-element numpy array of bytes (`dtype=object`):

```json
{
"mode": "train" | "infer",
"data": "<CSV string>"
}
```

---

## Troubleshooting

- **Logs:**
For debugging, check OVMS container logs:
```bash
docker logs iris_logisticreg_ovms
```
- **Code Changes:**
After editing `pipeline/ovmsmodel.py`, **restart the OVMS container** for changes to take effect.

- **If nothing prints from your Python node:**
- Use `flush=True` in your print statements.
- Print to `sys.stderr`.
- Try writing to a file inside the container for debug.

---

## Example Output
For Training:

```
Read CSV file successfully
Training mode detected. Preparing data for training...
Connected to OVMS at localhost:9000
Server response decoded: string - [...]
The output string formatted as: [<1 - Model trained successfully | 0 - Otherwise> <Accuracy> <Precision> <Recall> <f1-score>]
```
For Inference:

```
Read CSV file successfully
Inference mode detected.
Inference predictions: [...]

```

---
210 changes: 210 additions & 0 deletions extras/iris_pipeline_project/client/client_inference.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,210 @@
# Standard library.
import json
import os
import sys

# Third-party. (numpy was previously imported twice; deduplicated.)
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import tritonclient.grpc as grpcclient

# OVMS gRPC endpoint and the Mediapipe graph name registered in model_config.json.
SERVER_URL = "localhost:9000"
MODEL_NAME = "pipeline"

def print_usage():
    """Print CLI usage to stderr and terminate with exit status 1.

    Usage errors belong on stderr so that piped stdout stays clean;
    the message text itself is unchanged.
    """
    print(
        "Usage: python client_inference.py infer <path_to_csv> "
        "[--target_column <column>] [--encode <col1,col2,...>] "
        "[--model_class <ModelClassName>]",
        file=sys.stderr,
    )
    sys.exit(1)

def _print_prediction(entry):
    """Pretty-print one classification result dict ({"label", "probabilities"})."""
    label = entry.get("label")
    probs = entry.get("probabilities", {})
    print(f"Prediction: {label}")
    print("Probabilities:")
    for k, v in probs.items():
        print(f"  {k}: {v:.4f}")
    print("-" * 30)


def _handle_kmeans(result, df, target_column):
    """Decode a KMeans response, print assignments/centroids and show a scatter plot.

    result is the raw "pipeline_output" numpy array; df is the already-loaded
    feature frame used only for plotting the first two feature columns.
    """
    decoded_items = []
    if isinstance(result, np.ndarray):
        if result.dtype == object:
            decoded_items = result
        elif result.dtype in (np.float64, np.float32):
            # Server may pack the JSON string into a float buffer; best-effort decode.
            try:
                decoded_items = [result.tobytes().decode("utf-8").strip("\x00")]
            except Exception:
                pass

    for item in decoded_items:
        if isinstance(item, (bytes, bytearray)):
            item = item.decode()
        try:
            response_data = json.loads(item)
        except Exception:
            response_data = item

        print("KMeans clustering result:")
        if (
            isinstance(response_data, dict)
            and "labels" in response_data
            and "centroids" in response_data
        ):
            labels = response_data["labels"]
            centroids = np.array(response_data["centroids"])

            print("Cluster assignments:", labels[:20], "...")
            print("Cluster centroids:")
            for i, centroid in enumerate(centroids):
                print(f"  Centroid {i}: {centroid}")

            # Plot only the first two feature dimensions against the centroids.
            X = (df.drop(columns=[target_column]) if target_column else df).values
            plt.figure(figsize=(8, 6))
            plt.scatter(X[:, 0], X[:, 1], c=labels, cmap="viridis", s=30, alpha=0.7, label="Points")
            plt.scatter(centroids[:, 0], centroids[:, 1], c="red", s=200, marker="X", label="Centroids")
            plt.title("KMeans Clustering Result")
            plt.xlabel("Feature 1")
            plt.ylabel("Feature 2")
            plt.legend()
            plt.show()
        else:
            print("Unexpected response:", response_data)


def main():
    """CLI entry point: load a CSV, send it to the OVMS "pipeline" graph over
    gRPC as a single JSON-bytes tensor, and pretty-print the response
    (classification predictions or KMeans clustering output).

    Exits with status 1 on bad arguments, unreadable CSV, connection failure,
    or a failed inference call.
    """
    if len(sys.argv) < 3 or sys.argv[1] != "infer":
        print_usage()

    mode = sys.argv[1]
    csv_path = sys.argv[2]
    target_column = None
    encode_cols = []
    model_class_name = "LogisticRegressionTorch"

    # Parse optional flags; each flag consumes the following argv token.
    for i in range(3, len(sys.argv)):
        if sys.argv[i] == "--target_column" and i + 1 < len(sys.argv):
            target_column = sys.argv[i + 1]
        elif sys.argv[i] == "--encode" and i + 1 < len(sys.argv):
            encode_cols = sys.argv[i + 1].split(",")
        elif sys.argv[i] == "--model_class" and i + 1 < len(sys.argv):
            model_class_name = sys.argv[i + 1]

    # Clustering is unsupervised, so never strip a target column for KMeans;
    # an explicit "--target_column none" also disables stripping.
    if "KMeans" in model_class_name or (target_column and target_column.lower() == "none"):
        target_column = None

    if not os.path.isfile(csv_path):
        print(f"ERROR: Could not find CSV file: {csv_path}")
        sys.exit(1)

    try:
        df = pd.read_csv(csv_path)
        print("CSV loaded successfully.")  # was "... kmeans" — debug leftover removed
    except Exception as e:
        print(f"ERROR: Failed to read CSV: {e}")
        sys.exit(1)

    # For supervised models, drop the label column before sending features.
    if "KMeans" not in model_class_name and target_column and target_column in df.columns:
        df = df.drop(columns=[target_column])

    if encode_cols:
        # Local import: sklearn is only needed when --encode is used.
        from sklearn.preprocessing import LabelEncoder
        for col in encode_cols:
            if col in df.columns:
                df[col] = LabelEncoder().fit_transform(df[col])
            else:
                print(f"WARNING: Encode column '{col}' not found in CSV")

    # The pipeline expects one BYTES element containing the whole request as JSON.
    payload = {
        "mode": mode,
        "X": df.values.tolist(),
        "y": None,
        "params": {},
        "model_class": model_class_name,
    }
    pipeline_input = np.array([json.dumps(payload).encode("utf-8")], dtype=object)

    try:
        client = grpcclient.InferenceServerClient(url=SERVER_URL)
        print(f"Connected to OVMS at {SERVER_URL}")
    except Exception as e:
        print(f"ERROR: Could not connect to OVMS at {SERVER_URL}: {e}")
        sys.exit(1)

    infer_input = grpcclient.InferInput("pipeline_input", pipeline_input.shape, "BYTES")
    infer_input.set_data_from_numpy(pipeline_input)

    try:
        response = client.infer(model_name=MODEL_NAME, inputs=[infer_input])
        result = response.as_numpy("pipeline_output")

        # BUG FIX: a leftover debug loop here previously assumed every object
        # response had "labels"/"centroids" keys, which raised on classification
        # responses and made the whole call report "Inference call failed".
        # Responses are now dispatched once, by model type and payload shape.
        if "KMeans" in model_class_name:
            _handle_kmeans(result, df, target_column)
        elif isinstance(result, np.ndarray) and result.dtype == object:
            print("Server responded with object array.")
            for item in result:
                if isinstance(item, (bytes, bytearray)):
                    try:
                        item = item.decode()
                    except Exception:
                        pass
                try:
                    response_data = json.loads(item)
                except Exception:
                    response_data = item
                if isinstance(response_data, list):
                    for entry in response_data:
                        _print_prediction(entry)
                elif isinstance(response_data, dict):
                    _print_prediction(response_data)
                else:
                    print(response_data)
        elif isinstance(result, (bytes, bytearray)):
            try:
                decoded = result.decode()
                print("Decoded result:", decoded)
                try:
                    print(json.dumps(json.loads(decoded), indent=2))
                except Exception:
                    print(decoded)
            except Exception:
                print("Raw bytes result:", result)
        elif isinstance(result, np.ndarray) and result.dtype in (np.float32, np.float64):
            print("Server responded with numeric array.")
            print("Values:", result)
        else:
            print("Server response (raw):", str(result))

    except Exception as e:
        print(f"ERROR: Inference call failed: {e}")
        sys.exit(1)

# Run the CLI only when executed directly, not when imported as a module.
if __name__ == "__main__":
    main()
Loading