diff --git a/extras/iris_pipeline_project/.gitignore b/extras/iris_pipeline_project/.gitignore
new file mode 100644
index 0000000000..5ac07612fb
--- /dev/null
+++ b/extras/iris_pipeline_project/.gitignore
@@ -0,0 +1,2 @@
+data/
+model/
diff --git a/extras/iris_pipeline_project/Dockerfile b/extras/iris_pipeline_project/Dockerfile
new file mode 100644
index 0000000000..5d0dd8ef5f
--- /dev/null
+++ b/extras/iris_pipeline_project/Dockerfile
@@ -0,0 +1,23 @@
+FROM openvino/model_server:latest
+
+USER root
+
+ENV LD_LIBRARY_PATH=/ovms/lib
+ENV PYTHONPATH=/ovms/lib/python
+
+RUN apt-get update && apt-get install -y python3-pip
+RUN pip install --no-cache-dir --break-system-packages \
+    pandas numpy scikit-learn joblib skl2onnx onnx onnxruntime \
+    scikit-learn-intelex==2025.7.0 \
+    tritonclient[all]
+RUN python3 -m pip install --no-cache-dir --break-system-packages \
+    torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu && \
+    python3 -m pip install --no-cache-dir --break-system-packages \
+    intel-extension-for-pytorch oneccl_bind_pt \
+    --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/cpu/us/
+
+RUN rm /ovms/lib/libtbb.so* && cp /usr/local/lib/libtbb.so* /ovms/lib/
+
+ENTRYPOINT ["/ovms/bin/ovms"]
+
+
diff --git a/extras/iris_pipeline_project/README.md b/extras/iris_pipeline_project/README.md
new file mode 100644
index 0000000000..c25350d9ec
--- /dev/null
+++ b/extras/iris_pipeline_project/README.md
@@ -0,0 +1,182 @@
+# OVMS Iris Pipeline Example
+
+This repository demonstrates how to use OpenVINO Model Server (OVMS) with a custom MediaPipe pipeline for the Iris dataset, covering both model training and inference through a Python client.
+It currently supports Logistic Regression and KMeans.
+
+---
+
+## Step 1: Clone the Repository
+
+```bash
+git clone https://github.com/openvinotoolkit/model_server.git
+cd model_server/extras/iris_pipeline_project
+```
+
+---
+
+## Step 2: Build and Run OVMS Docker Image
+
+### 2.1. Build the Docker Image
+
+```bash
+docker build --no-cache -t prototype_iris .
+```
+
+### 2.2. Run the OVMS Container
+
+```bash
+docker run --rm -it --name prototype_iris -v $(pwd):/workspace -p 9000:9000 prototype_iris --config_path /workspace/model_config.json --port 9000 --log_level DEBUG
+```
+- **Note:** Adjust `$(pwd)` if you are running from a different working directory.
+
+---
+
+## Step 3: Project Structure
+
+```
+client/
+  ├── client_inference.py
+  └── client_train.py
+data_folder/
+  ├── iris_train.csv
+  └── iris_test.csv
+pipeline/
+  ├── graph.pbtxt
+  ├── model.py
+  └── ovmsmodel.py
+Dockerfile
+data_preprocess.py
+model_config.json
+kmeans_param.json
+hyperparams.json
+labelmap.json
+```
+
+---
+
+## Step 4: Run Training and Inference
+
+The client scripts run on the host and require `pandas`, `numpy`, `scikit-learn`, `matplotlib`, and `tritonclient[all]` installed locally.
+
+### 4.1. Training
+
+```bash
+python client/client_train.py train data_folder/iris_train.csv Species --params hyperparams.json --encode Species --model_class LogisticRegressionTorch
+
+python client/client_train.py train data_folder/iris_train.csv Species --params kmeans_param.json --encode Species --model_class KMeansSkLearn
+```
+
+### 4.2. Inference
+
+```bash
+python client/client_inference.py infer data_folder/iris_test.csv --target_column Species --model_class LogisticRegressionTorch
+
+python client/client_inference.py infer iris_train_nolabel.csv --target_column Species --model_class KMeansSkLearn
+```
+- **Note:** `iris_train_nolabel.csv` is a copy of the training data with the `Species` column removed.
+
+---
+
+### Enabling Accelerator Support
+
+To enable Intel accelerator support, set the boolean constructor flags (`use_ipex` / `use_onedal`) in `pipeline/model.py` to `True` or `False` as needed; see the sketch below.
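+
+A minimal sketch of what those switches look like, mirroring the constructors in `pipeline/model.py` (the server-side node in `pipeline/ovmsmodel.py` instantiates these classes without arguments, so the defaults set here are what take effect; the real classes also inherit from the `ModelClass` ABC defined in the same file):
+
+```python
+import torch
+
+class LogisticRegressionTorch:
+    def __init__(self, use_ipex=False):    # set to True to optimize training with Intel Extension for PyTorch
+        self.model = None
+        self.device = torch.device("cpu")
+        self.use_ipex = use_ipex
+
+class KMeansSkLearn:
+    def __init__(self, use_onedal=False):  # set to True to patch scikit-learn with oneDAL (scikit-learn-intelex)
+        self.model = None
+        self.use_onedal = use_onedal
+```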
+
+## Instructions for Preparing the Data
+
+Run the command below to download the Iris dataset, widely considered the "hello world" of classification datasets:
+
+```bash
+curl -o iris.csv https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data
+```
+
+Then run `python data_preprocess.py <output_dir>` to clean, shuffle, and split it into `iris_train.csv` and `iris_test.csv`.
+
+---
+
+## Command-Line Usage
+
+The training and inference clients support flexible options for both Logistic Regression and KMeans models.
+
+### Usage
+
+```bash
+python client/client_train.py <train|infer> <csv_path> <target_column> \
+    [--params <params.json>] [--encode <col1,col2,...>] [--model_class <ModelClassName>]
+```
+
+### Arguments
+
+- `<train|infer>`: mode of operation.
+  - `train`: train a new model with the provided dataset.
+  - `infer`: run inference using a trained model.
+- `<csv_path>`: path to the dataset in CSV format.
+- `<target_column>`:
+  - For classification (Logistic Regression): name of the target column.
+  - For clustering (KMeans): use `NONE`.
+- `--params <params.json>` (optional): path to a JSON file containing model hyperparameters, for example:
+
+  ```json
+  {
+    "max_iter": 300,
+    "solver": "lbfgs",
+    "random_state": 42,
+    "n_clusters": 3
+  }
+  ```
+
+  If not provided, default parameters are used.
+- `--encode <col1,col2,...>` (optional): comma-separated list of categorical column names to encode.
+  - Encoding can also be performed client-side before sending data to the server.
+  - If omitted, no encoding is applied.
+- `--model_class <ModelClassName>` (optional): the model class to use (`LogisticRegressionTorch` or `KMeansSkLearn`).
+  - Defaults to `LogisticRegressionTorch` if omitted.
+
+---
+
+## Troubleshooting
+
+- **Logs:**
+  For debugging, check the OVMS container logs:
+  ```bash
+  docker logs prototype_iris
+  ```
+- **Code Changes:**
+  After editing `pipeline/ovmsmodel.py`, **restart the OVMS container** for changes to take effect.
+
+- **If nothing prints from your Python node:**
+  - Use `flush=True` in your print statements.
+  - Print to `sys.stderr`.
+  - Try writing to a file inside the container for debugging.
+
+---
+
+## Example Output
+
+For training:
+
+```
+Read CSV file successfully
+Training mode detected. Preparing data for training...
+Connected to OVMS at localhost:9000
+Model trained successfully
+```
+
+For inference:
+
+```
+Read CSV file successfully
+Inference mode detected.
+Inference predictions: [...]
+```
+
+---
+
+NOTE: For KMeans, cluster assignments and centroid details are also printed in the container logs. The `plt.show()` visualization in `client_inference.py` requires a GUI-capable environment; it will not display in a headless terminal.
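+
+---
+
+## Calling the Pipeline Directly
+
+If you want to talk to the pipeline without the bundled clients, the graph exposes a single BYTES input named `pipeline_input` that carries a JSON payload and returns `pipeline_output`. The sketch below mirrors what `client/client_train.py` and `client/client_inference.py` send; the server address and the single feature row are placeholder values:
+
+```python
+import json
+import numpy as np
+import tritonclient.grpc as grpcclient
+
+# JSON payload understood by pipeline/ovmsmodel.py
+payload = {
+    "mode": "infer",                          # "train" or "infer"
+    "X": [[6.1, 2.8, 4.7, 1.2]],              # feature rows
+    "y": None,                                # class labels, required only for training
+    "params": {},                             # optional hyperparameters
+    "model_class": "LogisticRegressionTorch"  # or "KMeansSkLearn"
+}
+
+client = grpcclient.InferenceServerClient(url="localhost:9000")
+inp = grpcclient.InferInput("pipeline_input", [1], "BYTES")
+inp.set_data_from_numpy(np.array([json.dumps(payload).encode("utf-8")], dtype=object))
+
+result = client.infer(model_name="pipeline", inputs=[inp]).as_numpy("pipeline_output")
+print(result)
+```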
diff --git a/extras/iris_pipeline_project/client/client_inference.py b/extras/iris_pipeline_project/client/client_inference.py new file mode 100644 index 0000000000..e9ef62e352 --- /dev/null +++ b/extras/iris_pipeline_project/client/client_inference.py @@ -0,0 +1,209 @@ +import numpy as np +import tritonclient.grpc as grpcclient +import pandas as pd +import json +import sys +import os +import matplotlib.pyplot as plt +SERVER_URL = "localhost:9000" +MODEL_NAME = "pipeline" + +def print_usage(): + print("Usage: python client_inference.py infer [--target_column ] [--encode ] [--model_class ]") + sys.exit(1) + +def main(): + if len(sys.argv) < 3 or sys.argv[1] != "infer": + print_usage() + + mode = sys.argv[1] + csv_path = sys.argv[2] + target_column = None + encode_cols = [] + model_class_name = "LogisticRegressionTorch" + + for i in range(3, len(sys.argv)): + if sys.argv[i] == "--target_column" and i+1 < len(sys.argv): + target_column = sys.argv[i+1] + elif sys.argv[i] == "--encode" and i+1 < len(sys.argv): + encode_cols = sys.argv[i+1].split(",") + elif sys.argv[i] == "--model_class" and i+1 < len(sys.argv): + model_class_name = sys.argv[i+1] + + if "KMeans" in model_class_name or (target_column and target_column.lower() == "none"): + target_column = None + + if not os.path.isfile(csv_path): + print(f"ERROR: Could not find CSV file: {csv_path}") + sys.exit(1) + + try: + df = pd.read_csv(csv_path) + print("CSV loaded successfully. kmeans") + except Exception as e: + print(f"ERROR: Failed to read CSV: {e}") + sys.exit(1) + + if "KMeans" not in model_class_name and target_column and target_column in df.columns: + df = df.drop(columns=[target_column]) + + if encode_cols: + for col in encode_cols: + if col in df.columns: + from sklearn.preprocessing import LabelEncoder + le = LabelEncoder() + df[col] = le.fit_transform(df[col]) + else: + print(f"WARNING: Encode column '{col}' not found in CSV") + + X = df.values + + payload = { + "mode": mode, + "X": X.tolist(), + "y": None, + "params": {}, + "model_class": model_class_name + } + + input_bytes = json.dumps(payload).encode("utf-8") + pipeline_input = np.array([input_bytes], dtype=object) + + try: + client = grpcclient.InferenceServerClient(url=SERVER_URL) + print(f"Connected to OVMS at {SERVER_URL}") + except Exception as e: + print(f"ERROR: Could not connect to OVMS at {SERVER_URL}: {e}") + sys.exit(1) + + infer_input = grpcclient.InferInput("pipeline_input", pipeline_input.shape, "BYTES") + infer_input.set_data_from_numpy(pipeline_input) + + try: + response = client.infer( + model_name=MODEL_NAME, + inputs=[infer_input] + ) + + result = response.as_numpy("pipeline_output") + + if result.dtype == object: + for item in result: + if isinstance(item, (bytes, bytearray)): + item = item.decode("utf-8") + parsed = json.loads(item) + print("Cluster assignments:", parsed["labels"]) + print("Cluster centroids:", parsed["centroids"]) + else: + pass + + if result.dtype in [np.float64, np.float32]: + raw_bytes = result.view(np.uint8).tobytes() + decoded = raw_bytes.decode("utf-8", errors="ignore").strip("\x00") + decoded_items = [decoded] + + + print("DEBUG result type:", type(result)) + if isinstance(result, np.ndarray): + print("DEBUG result dtype:", result.dtype) + print("DEBUG result shape:", result.shape) + + if "KMeans" in model_class_name: + decoded_items = [] + if isinstance(result, np.ndarray): + if result.dtype == object: + decoded_items = result + elif result.dtype in [np.float64, np.float32]: + raw_bytes = result.tobytes() + try: + decoded = 
raw_bytes.decode("utf-8").strip("\x00") + decoded_items = [decoded] + except Exception as e: + pass + + for item in decoded_items: + if isinstance(item, (bytes, bytearray)): + item = item.decode() + try: + response_data = json.loads(item) + except Exception: + response_data = item + + print("KMeans clustering result:") + if isinstance(response_data, dict) and "labels" in response_data and "centroids" in response_data: + labels = response_data["labels"] + centroids = np.array(response_data["centroids"]) + + print("Cluster assignments:", labels[:20], "...") + print("Cluster centroids:") + for i, centroid in enumerate(centroids): + print(f" Centroid {i}: {centroid}") + + X = df.drop(columns=[target_column]) if (target_column is not None and target_column in df.columns) else df + X = X.values + plt.figure(figsize=(8, 6)) + plt.scatter(X[:, 0], X[:, 1], c=labels, cmap="viridis", s=30, alpha=0.7, label="Points") + plt.scatter(centroids[:, 0], centroids[:, 1], c="red", s=200, marker="X", label="Centroids") + plt.title("KMeans Clustering Result") + plt.xlabel("Feature 1") + plt.ylabel("Feature 2") + plt.legend() + plt.show() + else: + print("Unexpected response:", response_data) + + elif isinstance(result, np.ndarray) and result.dtype == object: + print("Server responded with object array.") + for item in result: + if isinstance(item, (bytes, bytearray)): + try: + item = item.decode() + except Exception: + pass + try: + response_data = json.loads(item) + except Exception: + response_data = item + if isinstance(response_data, list): + for entry in response_data: + label = entry.get("label") + probs = entry.get("probabilities", {}) + print(f"Prediction: {label}") + print("Probabilities:") + for k, v in probs.items(): + print(f" {k}: {v:.4f}") + print("-" * 30) + elif isinstance(response_data, dict): + label = response_data.get("label") + probs = response_data.get("probabilities", {}) + print(f"Prediction: {label}") + print("Probabilities:") + for k, v in probs.items(): + print(f" {k}: {v:.4f}") + print("-" * 30) + else: + print(response_data) + elif isinstance(result, (bytes, bytearray)): + try: + decoded = result.decode() + print("Decoded result:", decoded) + try: + response_data = json.loads(decoded) + print(json.dumps(response_data, indent=2)) + except Exception: + print(decoded) + except Exception: + print("Raw bytes result:", result) + elif isinstance(result, np.ndarray) and result.dtype in [np.float32, np.float64]: + print("Server responded with numeric array.") + print("Values:", result) + else: + print("Server response (raw):", str(result)) + + + except Exception as e: + print(f"ERROR: Inference call failed: {e}") + sys.exit(1) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/extras/iris_pipeline_project/client/client_train.py b/extras/iris_pipeline_project/client/client_train.py new file mode 100644 index 0000000000..11e8944c44 --- /dev/null +++ b/extras/iris_pipeline_project/client/client_train.py @@ -0,0 +1,131 @@ + +import os +import sys +import json +import pandas as pd +import numpy as np +import tritonclient.grpc as grpcclient +from sklearn.preprocessing import LabelEncoder + + +def main(): + SERVER_URL = "localhost:9000" + MODEL_NAME = "pipeline" + + if len(sys.argv) < 4 or sys.argv[1] not in ("train", "infer"): + print("Usage: python client_train.py " + "[--params ] [--encode ] [--model_class ]") + sys.exit(1) + + mode = sys.argv[1] + csv_path = sys.argv[2] + target_column = sys.argv[3] if sys.argv[3] != "NONE" else None + + params_path = None + 
encode_cols = [] + model_class_name = "LogisticRegressionTorch" + + if "--params" in sys.argv: + idx = sys.argv.index("--params") + if idx + 1 < len(sys.argv): + params_path = sys.argv[idx + 1] + + if "--encode" in sys.argv: + idx = sys.argv.index("--encode") + if idx + 1 < len(sys.argv): + encode_cols = sys.argv[idx + 1].split(",") + + if "--model_class" in sys.argv: + idx = sys.argv.index("--model_class") + if idx + 1 < len(sys.argv): + model_class_name = sys.argv[idx + 1] + + if not os.path.isfile(csv_path): + print(f"ERROR: Could not find CSV file: {csv_path}") + sys.exit(1) + + try: + df = pd.read_csv(csv_path) + print("Read CSV file successfully") + except Exception as e: + print(f"ERROR: Could not read CSV file: {e}") + sys.exit(1) + + if encode_cols: + for col in encode_cols: + if col in df.columns: + le = LabelEncoder() + df[col] = le.fit_transform(df[col]) + else: + print(f"WARNING: Encode column '{col}' not found in CSV") + + if model_class_name == "KMeans": + X = df.values + y = None + else: + if not target_column or target_column not in df.columns: + print(f"ERROR: Target column '{target_column}' not found in CSV") + sys.exit(1) + X = df.drop(columns=[target_column]).values + y = df[target_column].values.tolist() if mode == "train" else None + + params = {} + if params_path: + try: + with open(params_path, "r") as f: + params = json.load(f) + print(f"Loaded hyperparameters: {params}") + except Exception as e: + print(f"ERROR: Could not read params JSON: {e}") + sys.exit(1) + + payload = { + "mode": mode, + "X": X.tolist(), + "y": y, + "params": params, + "model_class": model_class_name + } + + input_bytes = json.dumps(payload).encode("utf-8") + pipeline_input = np.array([input_bytes], dtype=object) + + try: + client = grpcclient.InferenceServerClient(url=SERVER_URL) + print(f"Connected to OVMS at {SERVER_URL}") + except Exception as e: + print(f"ERROR: Could not connect to OVMS at {SERVER_URL}: {e}") + sys.exit(1) + + infer_input = grpcclient.InferInput("pipeline_input", pipeline_input.shape, "BYTES") + infer_input.set_data_from_numpy(pipeline_input) + + try: + response = client.infer( + model_name=MODEL_NAME, + inputs=[infer_input] + ) + result = response.as_numpy("pipeline_output") + + if isinstance(result, np.ndarray) and result.dtype == object: + print("Server response:") + for item in result: + if isinstance(item, (bytes, bytearray)): + try: + item = item.decode() + except Exception: + pass + try: + response_data = json.loads(item) + print(json.dumps(response_data, indent=2)) + except Exception: + print(item) + else: + print("Model trained successfully") + + except Exception as e: + print(f"ERROR: Inference call failed: {e}") + sys.exit(1) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/extras/iris_pipeline_project/data_folder/iris_test.csv b/extras/iris_pipeline_project/data_folder/iris_test.csv new file mode 100644 index 0000000000..aa74f99149 --- /dev/null +++ b/extras/iris_pipeline_project/data_folder/iris_test.csv @@ -0,0 +1,2 @@ +SepalLength,SepalWidth,PetalLength,PetalWidth +6.1,2.8,4.7,1.2 diff --git a/extras/iris_pipeline_project/data_folder/iris_train.csv b/extras/iris_pipeline_project/data_folder/iris_train.csv new file mode 100644 index 0000000000..94775782c1 --- /dev/null +++ b/extras/iris_pipeline_project/data_folder/iris_train.csv @@ -0,0 +1,508 @@ +SepalLength,SepalWidth,PetalLength,PetalWidth,Species +6.1,2.8,4.7,1.2,Iris-versicolor +5.7,3.8,1.7,0.3,Iris-setosa +7.7,2.6,6.9,2.3,Iris-virginica 
+6.0,2.9,4.5,1.5,Iris-versicolor +6.8,2.8,4.8,1.4,Iris-versicolor +5.4,3.4,1.5,0.4,Iris-setosa +5.6,2.9,3.6,1.3,Iris-versicolor +6.9,3.1,5.1,2.3,Iris-virginica +6.2,2.2,4.5,1.5,Iris-versicolor +5.8,2.7,3.9,1.2,Iris-versicolor +6.5,3.2,5.1,2.0,Iris-virginica +4.8,3.0,1.4,0.1,Iris-setosa +6.2,2.2,4.5,1.5,Iris-versicolor +5.6,2.5,3.9,1.1,Iris-versicolor +5.9,3.2,4.8,1.8,Iris-versicolor +6.1,2.8,4.0,1.3,Iris-versicolor +6.3,2.5,4.9,1.5,Iris-versicolor +6.1,2.8,4.7,1.2,Iris-versicolor +6.4,2.9,4.3,1.3,Iris-versicolor +6.6,3.0,4.4,1.4,Iris-versicolor +6.8,2.8,4.8,1.4,Iris-versicolor +6.7,3.0,5.0,1.7,Iris-versicolor +6.0,2.9,4.5,1.5,Iris-versicolor +5.7,2.6,3.5,1.0,Iris-versicolor +5.5,2.4,3.8,1.1,Iris-versicolor +5.5,2.4,3.7,1.0,Iris-versicolor +5.8,2.7,3.9,1.2,Iris-versicolor +6.0,2.7,5.1,1.6,Iris-versicolor +5.4,3.0,4.5,1.5,Iris-versicolor +6.0,3.4,4.5,1.6,Iris-versicolor +6.7,3.1,4.7,1.5,Iris-versicolor +6.3,2.3,4.4,1.3,Iris-versicolor +5.6,3.0,4.1,1.3,Iris-versicolor +5.5,2.5,4.0,1.3,Iris-versicolor +5.5,2.6,4.4,1.2,Iris-versicolor +6.1,3.0,4.6,1.4,Iris-versicolor +5.8,2.6,4.0,1.2,Iris-versicolor +5.0,2.3,3.3,1.0,Iris-versicolor +5.6,2.7,4.2,1.3,Iris-versicolor +5.7,3.0,4.2,1.2,Iris-versicolor +5.7,2.9,4.2,1.3,Iris-versicolor +6.2,2.9,4.3,1.3,Iris-versicolor +5.1,2.5,3.0,1.1,Iris-versicolor +5.7,2.8,4.1,1.3,Iris-versicolor +6.3,3.3,6.0,2.5,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +7.1,3.0,5.9,2.1,Iris-virginica +6.3,2.9,5.6,1.8,Iris-virginica +6.5,3.0,5.8,2.2,Iris-virginica +7.6,3.0,6.6,2.1,Iris-virginica +4.9,2.5,4.5,1.7,Iris-virginica +7.3,2.9,6.3,1.8,Iris-virginica +6.7,2.5,5.8,1.8,Iris-virginica +7.2,3.6,6.1,2.5,Iris-virginica +6.5,3.2,5.1,2.0,Iris-virginica +6.4,2.7,5.3,1.9,Iris-virginica +6.8,3.0,5.5,2.1,Iris-virginica +5.7,2.5,5.0,2.0,Iris-virginica +5.8,2.8,5.1,2.4,Iris-virginica +6.4,3.2,5.3,2.3,Iris-virginica +6.5,3.0,5.5,1.8,Iris-virginica +7.7,3.8,6.7,2.2,Iris-virginica +7.7,2.6,6.9,2.3,Iris-virginica +6.0,2.2,5.0,1.5,Iris-virginica +6.9,3.2,5.7,2.3,Iris-virginica +5.6,2.8,4.9,2.0,Iris-virginica +7.7,2.8,6.7,2.0,Iris-virginica +6.3,2.7,4.9,1.8,Iris-virginica +6.7,3.3,5.7,2.1,Iris-virginica +7.2,3.2,6.0,1.8,Iris-virginica +6.2,2.8,4.8,1.8,Iris-virginica +6.1,3.0,4.9,1.8,Iris-virginica +6.4,2.8,5.6,2.1,Iris-virginica +7.2,3.0,5.8,1.6,Iris-virginica +7.4,2.8,6.1,1.9,Iris-virginica +7.9,3.8,6.4,2.0,Iris-virginica +6.4,2.8,5.6,2.2,Iris-virginica +6.3,2.8,5.1,1.5,Iris-virginica +6.1,2.6,5.6,1.4,Iris-virginica +7.7,3.0,6.1,2.3,Iris-virginica +6.3,3.4,5.6,2.4,Iris-virginica +6.4,3.1,5.5,1.8,Iris-virginica +6.0,3.0,4.8,1.8,Iris-virginica +6.9,3.1,5.4,2.1,Iris-virginica +6.7,3.1,5.6,2.4,Iris-virginica +6.9,3.1,5.1,2.3,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +6.8,3.2,5.9,2.3,Iris-virginica +4.9,3.1,1.5,0.1,Iris-setosa +5.4,3.7,1.5,0.2,Iris-setosa +4.8,3.4,1.6,0.2,Iris-setosa +4.8,3.0,1.4,0.1,Iris-setosa +4.3,3.0,1.1,0.1,Iris-setosa +5.8,4.0,1.2,0.2,Iris-setosa +5.7,4.4,1.5,0.4,Iris-setosa +5.4,3.9,1.3,0.4,Iris-setosa +5.1,3.5,1.4,0.3,Iris-setosa +5.7,3.8,1.7,0.3,Iris-setosa +5.1,3.8,1.5,0.3,Iris-setosa +5.4,3.4,1.7,0.2,Iris-setosa +5.1,3.7,1.5,0.4,Iris-setosa +4.6,3.6,1.0,0.2,Iris-setosa +5.1,3.3,1.7,0.5,Iris-setosa +4.8,3.4,1.9,0.2,Iris-setosa +5.0,3.0,1.6,0.2,Iris-setosa +5.0,3.4,1.6,0.4,Iris-setosa +5.2,3.5,1.5,0.2,Iris-setosa +5.2,3.4,1.4,0.2,Iris-setosa +4.7,3.2,1.6,0.2,Iris-setosa +4.8,3.1,1.6,0.2,Iris-setosa +5.4,3.4,1.5,0.4,Iris-setosa +5.2,4.1,1.5,0.1,Iris-setosa +5.5,4.2,1.4,0.2,Iris-setosa +4.9,3.1,1.5,0.1,Iris-setosa +5.0,3.2,1.2,0.2,Iris-setosa 
+5.5,3.5,1.3,0.2,Iris-setosa +4.9,3.1,1.5,0.1,Iris-setosa +6.7,3.3,5.7,2.5,Iris-virginica +6.7,3.0,5.2,2.3,Iris-virginica +6.3,2.5,5.0,1.9,Iris-virginica +6.5,3.0,5.2,2.0,Iris-virginica +6.2,3.4,5.4,2.3,Iris-virginica +5.9,3.0,5.1,1.8,Iris-virginica +6.1,2.6,5.6,1.4,Iris-virginica +7.7,3.0,6.1,2.3,Iris-virginica +6.3,3.4,5.6,2.4,Iris-virginica +6.4,3.1,5.5,1.8,Iris-virginica +6.0,3.0,4.8,1.8,Iris-virginica +6.9,3.1,5.4,2.1,Iris-virginica +6.7,3.1,5.6,2.4,Iris-virginica +6.9,3.1,5.1,2.3,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +6.8,3.2,5.9,2.3,Iris-virginica +6.7,3.3,5.7,2.5,Iris-virginica +6.7,3.0,5.2,2.3,Iris-virginica +6.3,2.5,5.0,1.9,Iris-virginica +6.5,3.0,5.2,2.0,Iris-virginica +6.2,3.4,5.4,2.3,Iris-virginica +5.9,3.0,5.1,1.8,Iris-virginica +6.2,2.2,4.5,1.5,Iris-versicolor +5.6,2.5,3.9,1.1,Iris-versicolor +5.9,3.2,4.8,1.8,Iris-versicolor +6.1,2.8,4.0,1.3,Iris-versicolor +6.3,2.5,4.9,1.5,Iris-versicolor +6.1,2.8,4.7,1.2,Iris-versicolor +6.4,2.9,4.3,1.3,Iris-versicolor +6.6,3.0,4.4,1.4,Iris-versicolor +6.8,2.8,4.8,1.4,Iris-versicolor +6.7,3.0,5.0,1.7,Iris-versicolor +6.0,2.9,4.5,1.5,Iris-versicolor +5.7,2.6,3.5,1.0,Iris-versicolor +5.5,2.4,3.8,1.1,Iris-versicolor +5.5,2.4,3.7,1.0,Iris-versicolor +5.8,2.7,3.9,1.2,Iris-versicolor +6.0,2.7,5.1,1.6,Iris-versicolor +5.4,3.0,4.5,1.5,Iris-versicolor +6.0,3.4,4.5,1.6,Iris-versicolor +6.7,3.1,4.7,1.5,Iris-versicolor +6.3,2.3,4.4,1.3,Iris-versicolor +5.6,3.0,4.1,1.3,Iris-versicolor +5.5,2.5,4.0,1.3,Iris-versicolor +5.5,2.6,4.4,1.2,Iris-versicolor +5.5,3.5,1.3,0.2,Iris-setosa +4.9,3.1,1.5,0.1,Iris-setosa +5.1,3.8,1.5,0.3,Iris-setosa +6.3,3.3,4.7,1.6,Iris-versicolor +6.5,3.0,5.8,2.2,Iris-virginica +5.6,2.5,3.9,1.1,Iris-versicolor +5.7,2.8,4.5,1.3,Iris-versicolor +6.4,2.8,5.6,2.2,Iris-virginica +4.7,3.2,1.6,0.2,Iris-setosa +6.1,3.0,4.9,1.8,Iris-virginica +5.0,3.4,1.6,0.4,Iris-setosa +6.4,2.8,5.6,2.1,Iris-virginica +7.9,3.8,6.4,2.0,Iris-virginica +6.7,3.0,5.2,2.3,Iris-virginica +6.7,2.5,5.8,1.8,Iris-virginica +6.8,3.2,5.9,2.3,Iris-virginica +4.8,3.0,1.4,0.3,Iris-setosa +4.8,3.1,1.6,0.2,Iris-setosa +4.6,3.6,1.0,0.2,Iris-setosa +5.7,4.4,1.5,0.4,Iris-setosa +6.7,3.1,4.4,1.4,Iris-versicolor +4.8,3.4,1.6,0.2,Iris-setosa +4.4,3.2,1.3,0.2,Iris-setosa +6.3,2.5,5.0,1.9,Iris-virginica +6.4,3.2,4.5,1.5,Iris-versicolor +5.2,3.5,1.5,0.2,Iris-setosa +5.0,3.6,1.4,0.2,Iris-setosa +5.2,4.1,1.5,0.1,Iris-setosa +5.8,2.7,5.1,1.9,Iris-virginica +6.0,3.4,4.5,1.6,Iris-versicolor +6.7,3.1,4.7,1.5,Iris-versicolor +5.4,3.9,1.3,0.4,Iris-setosa +5.4,3.7,1.5,0.2,Iris-setosa +5.5,2.4,3.7,1.0,Iris-versicolor +6.3,2.8,5.1,1.5,Iris-virginica +6.4,3.1,5.5,1.8,Iris-virginica +6.6,3.0,4.4,1.4,Iris-versicolor +7.2,3.6,6.1,2.5,Iris-virginica +5.7,2.9,4.2,1.3,Iris-versicolor +7.6,3.0,6.6,2.1,Iris-virginica +5.6,3.0,4.5,1.5,Iris-versicolor +5.1,3.5,1.4,0.2,Iris-setosa +7.7,2.8,6.7,2.0,Iris-virginica +5.8,2.7,4.1,1.0,Iris-versicolor +5.2,3.4,1.4,0.2,Iris-setosa +5.0,3.5,1.3,0.3,Iris-setosa +5.1,3.8,1.9,0.4,Iris-setosa +5.0,2.0,3.5,1.0,Iris-versicolor +6.3,2.7,4.9,1.8,Iris-virginica +4.8,3.4,1.9,0.2,Iris-setosa +5.0,3.0,1.6,0.2,Iris-setosa +5.1,3.3,1.7,0.5,Iris-setosa +5.6,2.7,4.2,1.3,Iris-versicolor +5.1,3.4,1.5,0.2,Iris-setosa +5.7,3.0,4.2,1.2,Iris-versicolor +7.7,3.8,6.7,2.2,Iris-virginica +4.6,3.2,1.4,0.2,Iris-setosa +6.2,2.9,4.3,1.3,Iris-versicolor +5.7,2.5,5.0,2.0,Iris-virginica +5.5,4.2,1.4,0.2,Iris-setosa +6.0,3.0,4.8,1.8,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +6.0,2.2,4.0,1.0,Iris-versicolor +5.4,3.0,4.5,1.5,Iris-versicolor +6.2,3.4,5.4,2.3,Iris-virginica 
+5.5,2.3,4.0,1.3,Iris-versicolor +5.4,3.9,1.7,0.4,Iris-setosa +5.0,2.3,3.3,1.0,Iris-versicolor +6.4,2.7,5.3,1.9,Iris-virginica +5.0,3.3,1.4,0.2,Iris-setosa +5.0,3.2,1.2,0.2,Iris-setosa +5.5,2.4,3.8,1.1,Iris-versicolor +6.7,3.0,5.0,1.7,Iris-versicolor +4.9,3.1,1.5,0.1,Iris-setosa +5.8,2.8,5.1,2.4,Iris-virginica +5.0,3.4,1.5,0.2,Iris-setosa +5.0,3.5,1.6,0.6,Iris-setosa +5.9,3.2,4.8,1.8,Iris-versicolor +5.1,2.5,3.0,1.1,Iris-versicolor +6.9,3.2,5.7,2.3,Iris-virginica +6.0,2.7,5.1,1.6,Iris-versicolor +6.1,2.6,5.6,1.4,Iris-virginica +7.7,3.0,6.1,2.3,Iris-virginica +5.5,2.5,4.0,1.3,Iris-versicolor +4.4,2.9,1.4,0.2,Iris-setosa +4.3,3.0,1.1,0.1,Iris-setosa +6.0,2.2,5.0,1.5,Iris-virginica +7.2,3.2,6.0,1.8,Iris-virginica +4.6,3.1,1.5,0.2,Iris-setosa +5.1,3.5,1.4,0.3,Iris-setosa +4.4,3.0,1.3,0.2,Iris-setosa +6.3,2.5,4.9,1.5,Iris-versicolor +6.3,3.4,5.6,2.4,Iris-virginica +4.6,3.4,1.4,0.3,Iris-setosa +6.8,3.0,5.5,2.1,Iris-virginica +6.3,3.3,6.0,2.5,Iris-virginica +4.7,3.2,1.3,0.2,Iris-setosa +6.1,2.9,4.7,1.4,Iris-versicolor +5.0,3.6,1.4,0.2,Iris-setosa +5.4,3.9,1.7,0.4,Iris-setosa +4.6,3.4,1.4,0.3,Iris-setosa +5.0,3.4,1.5,0.2,Iris-setosa +4.4,2.9,1.4,0.2,Iris-setosa +4.9,3.1,1.5,0.1,Iris-setosa +5.4,3.7,1.5,0.2,Iris-setosa +4.8,3.4,1.6,0.2,Iris-setosa +6.9,3.1,4.9,1.5,Iris-versicolor +5.5,2.3,4.0,1.3,Iris-versicolor +6.5,2.8,4.6,1.5,Iris-versicolor +5.7,2.8,4.5,1.3,Iris-versicolor +6.3,3.3,4.7,1.6,Iris-versicolor +4.9,2.4,3.3,1.0,Iris-versicolor +6.6,2.9,4.6,1.3,Iris-versicolor +5.2,2.7,3.9,1.4,Iris-versicolor +5.0,2.0,3.5,1.0,Iris-versicolor +5.9,3.0,4.2,1.5,Iris-versicolor +6.0,2.2,4.0,1.0,Iris-versicolor +6.1,2.9,4.7,1.4,Iris-versicolor +6.5,2.8,4.6,1.5,Iris-versicolor +6.2,2.8,4.8,1.8,Iris-virginica +7.0,3.2,4.7,1.4,Iris-versicolor +6.4,3.2,5.3,2.3,Iris-virginica +5.1,3.8,1.6,0.2,Iris-setosa +6.9,3.1,5.4,2.1,Iris-virginica +5.9,3.0,4.2,1.5,Iris-versicolor +6.5,3.0,5.2,2.0,Iris-virginica +5.7,2.6,3.5,1.0,Iris-versicolor +5.2,2.7,3.9,1.4,Iris-versicolor +6.1,3.0,4.6,1.4,Iris-versicolor +4.5,2.3,1.3,0.3,Iris-setosa +6.6,2.9,4.6,1.3,Iris-versicolor +5.5,2.6,4.4,1.2,Iris-versicolor +5.3,3.7,1.5,0.2,Iris-setosa +5.6,3.0,4.1,1.3,Iris-versicolor +7.3,2.9,6.3,1.8,Iris-virginica +6.7,3.3,5.7,2.1,Iris-virginica +5.1,3.7,1.5,0.4,Iris-setosa +4.9,2.4,3.3,1.0,Iris-versicolor +6.7,3.3,5.7,2.5,Iris-virginica +7.2,3.0,5.8,1.6,Iris-virginica +4.9,3.1,1.5,0.1,Iris-setosa +6.7,3.1,5.6,2.4,Iris-virginica +4.9,3.0,1.4,0.2,Iris-setosa +6.9,3.1,4.9,1.5,Iris-versicolor +7.4,2.8,6.1,1.9,Iris-virginica +6.3,2.9,5.6,1.8,Iris-virginica +5.7,2.8,4.1,1.3,Iris-versicolor +6.5,3.0,5.5,1.8,Iris-virginica +6.3,2.3,4.4,1.3,Iris-versicolor +6.4,2.9,4.3,1.3,Iris-versicolor +5.6,2.8,4.9,2.0,Iris-virginica +5.9,3.0,5.1,1.8,Iris-virginica +5.4,3.4,1.7,0.2,Iris-setosa +6.1,2.8,4.0,1.3,Iris-versicolor +4.9,2.5,4.5,1.7,Iris-virginica +5.8,4.0,1.2,0.2,Iris-setosa +5.8,2.6,4.0,1.2,Iris-versicolor +7.1,3.0,5.9,2.1,Iris-virginica +5.1,3.5,1.4,0.2,Iris-setosa +4.9,3.0,1.4,0.2,Iris-setosa +4.7,3.2,1.3,0.2,Iris-setosa +4.6,3.1,1.5,0.2,Iris-setosa +5.0,3.6,1.4,0.2,Iris-setosa +5.4,3.9,1.7,0.4,Iris-setosa +4.6,3.4,1.4,0.3,Iris-setosa +5.0,3.4,1.5,0.2,Iris-setosa +4.4,2.9,1.4,0.2,Iris-setosa +4.9,3.1,1.5,0.1,Iris-setosa +5.4,3.7,1.5,0.2,Iris-setosa +4.8,3.4,1.6,0.2,Iris-setosa +6.9,3.1,4.9,1.5,Iris-versicolor +5.5,2.3,4.0,1.3,Iris-versicolor +6.5,2.8,4.6,1.5,Iris-versicolor +5.7,2.8,4.5,1.3,Iris-versicolor +6.3,3.3,4.7,1.6,Iris-versicolor +4.9,2.4,3.3,1.0,Iris-versicolor +6.6,2.9,4.6,1.3,Iris-versicolor +5.2,2.7,3.9,1.4,Iris-versicolor 
+5.0,2.0,3.5,1.0,Iris-versicolor +5.9,3.0,4.2,1.5,Iris-versicolor +6.0,2.2,4.0,1.0,Iris-versicolor +6.1,2.9,4.7,1.4,Iris-versicolor +5.6,2.9,3.6,1.3,Iris-versicolor +6.7,3.1,4.4,1.4,Iris-versicolor +5.6,3.0,4.5,1.5,Iris-versicolor +5.8,2.7,4.1,1.0,Iris-versicolor +6.2,2.2,4.5,1.5,Iris-versicolor +5.6,2.5,3.9,1.1,Iris-versicolor +5.9,3.2,4.8,1.8,Iris-versicolor +6.1,2.8,4.0,1.3,Iris-versicolor +6.3,2.5,4.9,1.5,Iris-versicolor +6.1,2.8,4.7,1.2,Iris-versicolor +6.3,3.3,6.0,2.5,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +7.1,3.0,5.9,2.1,Iris-virginica +6.3,2.9,5.6,1.8,Iris-virginica +6.5,3.0,5.8,2.2,Iris-virginica +7.6,3.0,6.6,2.1,Iris-virginica +4.9,2.5,4.5,1.7,Iris-virginica +7.3,2.9,6.3,1.8,Iris-virginica +6.7,2.5,5.8,1.8,Iris-virginica +7.2,3.6,6.1,2.5,Iris-virginica +6.5,3.2,5.1,2.0,Iris-virginica +6.4,2.7,5.3,1.9,Iris-virginica +6.8,3.0,5.5,2.1,Iris-virginica +5.7,2.5,5.0,2.0,Iris-virginica +5.8,2.8,5.1,2.4,Iris-virginica +6.4,3.2,5.3,2.3,Iris-virginica +6.5,3.0,5.5,1.8,Iris-virginica +7.7,3.8,6.7,2.2,Iris-virginica +7.7,2.6,6.9,2.3,Iris-virginica +4.8,3.0,1.4,0.1,Iris-setosa +4.3,3.0,1.1,0.1,Iris-setosa +5.8,4.0,1.2,0.2,Iris-setosa +5.7,4.4,1.5,0.4,Iris-setosa +5.4,3.9,1.3,0.4,Iris-setosa +5.1,3.5,1.4,0.3,Iris-setosa +5.7,3.8,1.7,0.3,Iris-setosa +7.0,3.2,4.7,1.4,Iris-versicolor +6.4,3.2,4.5,1.5,Iris-versicolor +6.9,3.1,4.9,1.5,Iris-versicolor +5.5,2.3,4.0,1.3,Iris-versicolor +6.5,2.8,4.6,1.5,Iris-versicolor +5.7,2.8,4.5,1.3,Iris-versicolor +6.3,3.3,4.7,1.6,Iris-versicolor +4.9,2.4,3.3,1.0,Iris-versicolor +6.6,2.9,4.6,1.3,Iris-versicolor +5.2,2.7,3.9,1.4,Iris-versicolor +5.0,2.0,3.5,1.0,Iris-versicolor +5.9,3.0,4.2,1.5,Iris-versicolor +6.0,2.2,4.0,1.0,Iris-versicolor +7.3,2.9,6.3,1.8,Iris-virginica +6.7,3.3,5.7,2.1,Iris-virginica +5.1,3.7,1.5,0.4,Iris-setosa +4.9,2.4,3.3,1.0,Iris-versicolor +6.7,3.3,5.7,2.5,Iris-virginica +7.2,3.0,5.8,1.6,Iris-virginica +4.9,3.1,1.5,0.1,Iris-setosa +6.7,3.1,5.6,2.4,Iris-virginica +4.9,3.0,1.4,0.2,Iris-setosa +6.9,3.1,4.9,1.5,Iris-versicolor +4.3,3.0,1.1,0.1,Iris-setosa +5.8,4.0,1.2,0.2,Iris-setosa +5.7,4.4,1.5,0.4,Iris-setosa +5.4,3.9,1.3,0.4,Iris-setosa +5.1,3.5,1.4,0.3,Iris-setosa +5.7,3.8,1.7,0.3,Iris-setosa +7.0,3.2,4.7,1.4,Iris-versicolor +6.4,3.2,4.5,1.5,Iris-versicolor +6.9,3.1,4.9,1.5,Iris-versicolor +5.5,2.3,4.0,1.3,Iris-versicolor +6.5,2.8,4.6,1.5,Iris-versicolor +5.7,2.8,4.5,1.3,Iris-versicolor +6.3,3.3,4.7,1.6,Iris-versicolor +4.9,2.4,3.3,1.0,Iris-versicolor +6.6,2.9,4.6,1.3,Iris-versicolor +5.2,2.7,3.9,1.4,Iris-versicolor +7.4,2.8,6.1,1.9,Iris-virginica +6.3,2.9,5.6,1.8,Iris-virginica +5.7,2.8,4.1,1.3,Iris-versicolor +6.5,3.0,5.5,1.8,Iris-virginica +6.3,2.3,4.4,1.3,Iris-versicolor +6.4,2.9,4.3,1.3,Iris-versicolor +5.6,2.8,4.9,2.0,Iris-virginica +5.9,3.0,5.1,1.8,Iris-virginica +5.4,3.4,1.7,0.2,Iris-setosa +6.1,2.8,4.0,1.3,Iris-versicolor +4.9,2.5,4.5,1.7,Iris-virginica +5.8,4.0,1.2,0.2,Iris-setosa +5.8,2.6,4.0,1.2,Iris-versicolor +7.1,3.0,5.9,2.1,Iris-virginica +5.1,3.5,1.4,0.2,Iris-setosa +4.9,3.0,1.4,0.2,Iris-setosa +4.7,3.2,1.3,0.2,Iris-setosa +4.6,3.1,1.5,0.2,Iris-setosa +5.0,3.6,1.4,0.2,Iris-setosa +5.4,3.9,1.7,0.4,Iris-setosa +4.6,3.4,1.4,0.3,Iris-setosa +5.0,3.4,1.5,0.2,Iris-setosa +4.4,2.9,1.4,0.2,Iris-setosa +4.9,3.1,1.5,0.1,Iris-setosa +6.1,2.9,4.7,1.4,Iris-versicolor +5.6,2.9,3.6,1.3,Iris-versicolor +6.7,3.1,4.4,1.4,Iris-versicolor +5.6,3.0,4.5,1.5,Iris-versicolor +5.8,2.7,4.1,1.0,Iris-versicolor +6.2,2.2,4.5,1.5,Iris-versicolor +5.6,2.5,3.9,1.1,Iris-versicolor +5.9,3.2,4.8,1.8,Iris-versicolor +6.1,2.8,4.0,1.3,Iris-versicolor 
+6.3,2.5,4.9,1.5,Iris-versicolor +6.1,2.8,4.7,1.2,Iris-versicolor +6.3,3.3,6.0,2.5,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +7.1,3.0,5.9,2.1,Iris-virginica +6.3,2.9,5.6,1.8,Iris-virginica +6.5,3.0,5.8,2.2,Iris-virginica +7.6,3.0,6.6,2.1,Iris-virginica +4.9,2.5,4.5,1.7,Iris-virginica +7.3,2.9,6.3,1.8,Iris-virginica +6.7,2.5,5.8,1.8,Iris-virginica +7.2,3.6,6.1,2.5,Iris-virginica +6.5,3.2,5.1,2.0,Iris-virginica +6.4,2.7,5.3,1.9,Iris-virginica +6.8,3.0,5.5,2.1,Iris-virginica +5.7,2.5,5.0,2.0,Iris-virginica +5.8,2.8,5.1,2.4,Iris-virginica +6.4,3.2,5.3,2.3,Iris-virginica +6.5,3.0,5.5,1.8,Iris-virginica +7.7,3.8,6.7,2.2,Iris-virginica +7.7,2.6,6.9,2.3,Iris-virginica +6.0,2.2,5.0,1.5,Iris-virginica +6.9,3.2,5.7,2.3,Iris-virginica +5.6,2.8,4.9,2.0,Iris-virginica +7.7,2.8,6.7,2.0,Iris-virginica +6.3,2.7,4.9,1.8,Iris-virginica +6.7,3.3,5.7,2.1,Iris-virginica +7.2,3.2,6.0,1.8,Iris-virginica +6.2,2.8,4.8,1.8,Iris-virginica +6.1,3.0,4.9,1.8,Iris-virginica +6.4,2.8,5.6,2.1,Iris-virginica +7.2,3.0,5.8,1.6,Iris-virginica +7.4,2.8,6.1,1.9,Iris-virginica +7.9,3.8,6.4,2.0,Iris-virginica +6.4,2.8,5.6,2.2,Iris-virginica +6.3,2.8,5.1,1.5,Iris-virginica +6.1,2.6,5.6,1.4,Iris-virginica +7.7,3.0,6.1,2.3,Iris-virginica +6.3,3.4,5.6,2.4,Iris-virginica +6.4,3.1,5.5,1.8,Iris-virginica +6.0,3.0,4.8,1.8,Iris-virginica +6.9,3.1,5.4,2.1,Iris-virginica +6.7,3.1,5.6,2.4,Iris-virginica +6.9,3.1,5.1,2.3,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +6.8,3.2,5.9,2.3,Iris-virginica +4.9,3.1,1.5,0.1,Iris-setosa +5.4,3.7,1.5,0.2,Iris-setosa +4.8,3.4,1.6,0.2,Iris-setosa +4.8,3.0,1.4,0.1,Iris-setosa +4.3,3.0,1.1,0.1,Iris-setosa +5.8,4.0,1.2,0.2,Iris-setosa +5.7,4.4,1.5,0.4,Iris-setosa +5.4,3.9,1.3,0.4,Iris-setosa +5.1,3.5,1.4,0.3,Iris-setosa \ No newline at end of file diff --git a/extras/iris_pipeline_project/data_preprocess.py b/extras/iris_pipeline_project/data_preprocess.py new file mode 100644 index 0000000000..c9f1694485 --- /dev/null +++ b/extras/iris_pipeline_project/data_preprocess.py @@ -0,0 +1,26 @@ +import pandas as pd +from sklearn.utils import shuffle +import os +import sys + +if len(sys.argv) < 2: + print("Usage: python datapreprocess.py ") + sys.exit(1) + +output_dir = sys.argv[1] + +#os.makedirs(output_dir, exist_ok=True) + +columns = ["SepalLength", "SepalWidth", "PetalLength", "PetalWidth", "Species"] + +df = pd.read_csv("iris.csv", header=None, names=columns) +df.dropna(inplace=True) +df = shuffle(df, random_state=42).reset_index(drop=True) + +train_path = os.path.join(output_dir, "iris_train.csv") +test_path = os.path.join(output_dir, "iris_test.csv") + +df.to_csv(train_path, index=False) +df.drop(columns=["Species"]).iloc[[0]].to_csv(test_path, index=False) + +print(f"Cleaned and saved iris_train.csv and iris_test.csv to '{output_dir}/'") diff --git a/extras/iris_pipeline_project/hyperparams.json b/extras/iris_pipeline_project/hyperparams.json new file mode 100644 index 0000000000..b0a9b77e79 --- /dev/null +++ b/extras/iris_pipeline_project/hyperparams.json @@ -0,0 +1,5 @@ +{ + "max_iter": 500, + "C": 0.8, + "solver": "liblinear" +} diff --git a/extras/iris_pipeline_project/kmeans_param.json b/extras/iris_pipeline_project/kmeans_param.json new file mode 100644 index 0000000000..119a724807 --- /dev/null +++ b/extras/iris_pipeline_project/kmeans_param.json @@ -0,0 +1,4 @@ +{ + "n_clusters": 3, + "random_state": 42 +} \ No newline at end of file diff --git a/extras/iris_pipeline_project/labelmap.json b/extras/iris_pipeline_project/labelmap.json new file mode 100644 index 0000000000..c9fe009386 --- /dev/null 
+++ b/extras/iris_pipeline_project/labelmap.json @@ -0,0 +1,6 @@ +{ + "1": "Iris-versicolor", + "0": "Iris-setosa", + "2": "Iris-virginica" +} + diff --git a/extras/iris_pipeline_project/model_config.json b/extras/iris_pipeline_project/model_config.json new file mode 100644 index 0000000000..c36f0df598 --- /dev/null +++ b/extras/iris_pipeline_project/model_config.json @@ -0,0 +1,16 @@ +{ + "model_config_list": [ + { + "config": { + "name": "infer_pipeline", + "base_path": "/workspace/model/generic_model" + } + } + ], + "mediapipe_config_list": [ + { + "name": "pipeline", + "graph_path": "/workspace/pipeline/graph.pbtxt" + } + ] +} diff --git a/extras/iris_pipeline_project/pipeline/graph.pbtxt b/extras/iris_pipeline_project/pipeline/graph.pbtxt new file mode 100644 index 0000000000..7eb12f8593 --- /dev/null +++ b/extras/iris_pipeline_project/pipeline/graph.pbtxt @@ -0,0 +1,15 @@ +input_stream: "OVMS_PY_TENSOR:pipeline_input" +output_stream: "OVMS_PY_TENSOR:pipeline_output" + +node { + name: "python_node" + calculator: "PythonExecutorCalculator" + input_side_packet: "PYTHON_NODE_RESOURCES:py" + input_stream: "PIPELINE_INPUT:pipeline_input" + output_stream: "PIPELINE_OUTPUT:pipeline_output" + node_options: { + [type.googleapis.com/mediapipe.PythonExecutorCalculatorOptions]: { + handler_path: "/workspace/pipeline/ovmsmodel.py" + } + } +} diff --git a/extras/iris_pipeline_project/pipeline/model.py b/extras/iris_pipeline_project/pipeline/model.py new file mode 100644 index 0000000000..3f3ab071b7 --- /dev/null +++ b/extras/iris_pipeline_project/pipeline/model.py @@ -0,0 +1,117 @@ +import abc +import time +import numpy as np +import torch +import torch.nn as nn +from sklearn.cluster import KMeans +from sklearnex import patch_sklearn, unpatch_sklearn +import intel_extension_for_pytorch as ipex + + +class ModelClass(abc.ABC): + @abc.abstractmethod + def fit(self, X: np.ndarray, y: np.ndarray, params: dict): + pass + + @abc.abstractmethod + def predict(self, X: np.ndarray): + pass + + +class LogisticRegressionTorch(ModelClass): + def __init__(self, use_ipex=False): + self.model = None + self.device = torch.device("cpu") + self.use_ipex = use_ipex + + def fit(self, X: np.ndarray, y: np.ndarray, params: dict): + try: + if self.use_ipex: + import intel_extension_for_pytorch as ipex + use_ipex = True + else: + use_ipex = False + except ImportError: + use_ipex = False + + X_tensor = torch.from_numpy(X).float().to(self.device) + y_tensor = torch.from_numpy(y).long().to(self.device) + + input_dim = X.shape[1] + num_classes = len(np.unique(y)) + self.model = nn.Linear(input_dim, num_classes) + + criterion = nn.CrossEntropyLoss() + lr = params.get("lr", 0.01) + epochs = params.get("epochs", 100) + optimizer = torch.optim.SGD(self.model.parameters(), lr=lr) + + if use_ipex: + self.model, optimizer = ipex.optimize(self.model, optimizer=optimizer, dtype=torch.float32) + + start = time.perf_counter() + self.model.train() + for _ in range(epochs): + optimizer.zero_grad() + outputs = self.model(X_tensor) + loss = criterion(outputs, y_tensor) + loss.backward() + optimizer.step() + end = time.perf_counter() + + print(f"[Torch LogisticRegression] Training time (IPEX={use_ipex}): {end - start:.4f} sec") + return self + + def predict(self, X: np.ndarray): + if self.model is None: + raise RuntimeError("Model not trained") + + X_tensor = torch.from_numpy(X).float().to(self.device) + start = time.perf_counter() + self.model.eval() + with torch.no_grad(): + logits = self.model(X_tensor) + probs = torch.softmax(logits, 
dim=1) + preds = torch.argmax(probs, dim=1).cpu().numpy() + end = time.perf_counter() + + print(f"[Torch LogisticRegression] Inference time: {end - start:.4f} sec") + return preds, probs.cpu().numpy() + + +class KMeansSkLearn(ModelClass): + def __init__(self, use_onedal=False): + self.model = None + self.use_onedal = use_onedal + + def fit(self, X: np.ndarray, y: np.ndarray, params: dict): + if self.use_onedal: + patch_sklearn() + else: + unpatch_sklearn() + + n_clusters = params.get("n_clusters", 3) + self.model = KMeans(n_clusters=n_clusters, random_state=42) + + start = time.perf_counter() + self.model.fit(X) + end = time.perf_counter() + + print(f"[Sklearn KMeans] Training time (oneDAL={self.use_onedal}): {end - start:.4f} sec") + return self + + def predict(self, X: np.ndarray): + if self.model is None: + raise RuntimeError("Model not trained") + + start = time.perf_counter() + labels = self.model.predict(X) + centroids = self.model.cluster_centers_ + end = time.perf_counter() + print(f"[Sklearn KMeans] Inference time (oneDAL={self.use_onedal}): {end - start:.4f} sec") + return labels, centroids + + def get_inertia(self): + if self.model is None: + raise RuntimeError("Model not trained") + return self.model.inertia_ diff --git a/extras/iris_pipeline_project/pipeline/ovmsmodel.py b/extras/iris_pipeline_project/pipeline/ovmsmodel.py new file mode 100644 index 0000000000..fc733fb80b --- /dev/null +++ b/extras/iris_pipeline_project/pipeline/ovmsmodel.py @@ -0,0 +1,132 @@ +import os +import json +import numpy as np +import torch +import torch.nn as nn +import joblib +from pyovms import Tensor +from model import LogisticRegressionTorch, KMeansSkLearn, ModelClass +from sklearn.preprocessing import LabelEncoder +from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score + + +MODEL_PATH = "/workspace/model/generic_model/model.pt" +ENCODER_PATH = "/workspace/model/generic_model/label_encoder.joblib" +META_PATH = "/workspace/model/generic_model/meta.json" + +AVAILABLE_MODEL_CLASSES = { + "LogisticRegressionTorch": LogisticRegressionTorch, + "KMeansSkLearn": KMeansSkLearn +} + +class OvmsPythonModel: + def initialize(self, kwargs): + print("[initialize] Python node initialized", flush=True) + self.model_obj = None + + def execute(self, inputs): + try: + inp_bytes = bytes(inputs[0].data) + first_brace = inp_bytes.find(b'{') + if first_brace > 0: + inp_bytes = inp_bytes[first_brace:] + payload = json.loads(inp_bytes.decode("utf-8")) + + mode = payload.get("mode") + X = np.array(payload.get("X"), dtype=np.float32) + y = payload.get("y", None) + params = payload.get("params", {}) + model_class_name = payload.get("model_class", "LogisticRegressionTorch") + + if model_class_name not in AVAILABLE_MODEL_CLASSES: + raise ValueError(f"Unknown model: {model_class_name}") + + model_obj = AVAILABLE_MODEL_CLASSES[model_class_name]() + + if model_class_name == "KMeansSkLearn": + if mode == "train": + trained = model_obj.fit(X, None, params.get("train_params", {})) + joblib.dump(trained.model, MODEL_PATH) + with open(META_PATH, "w") as f: + json.dump({ + "num_features": X.shape[1], + "n_clusters": trained.model.n_clusters + }, f) + inertia = trained.model.inertia_ + return [Tensor("pipeline_output", np.array([1.0, inertia], dtype=np.float32))] + elif mode == "infer": + if not os.path.exists(META_PATH) or not os.path.exists(MODEL_PATH): + raise FileNotFoundError("No model checkpoint found") + with open(META_PATH, "r") as f: + meta = json.load(f) + model_obj.model = joblib.load(MODEL_PATH) 
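+                    # Predict cluster assignments with the estimator restored above and return labels plus centroids as JSON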
+ labels = model_obj.model.predict(X) + centroids = model_obj.model.cluster_centers_ + response = { + "labels": labels.tolist(), + "centroids": centroids.tolist() + } + print(response, flush=True) + json_bytes = json.dumps(response).encode("utf-8") + + output = np.array([json_bytes], dtype=object) + + return [Tensor("pipeline_output", output)] + else: + raise ValueError(f"Unknown mode '{mode}' for KMeansSkLearn") + else: + if mode == "train": + if y is None: + raise ValueError("y labels are required for training") + y = np.array(y) + le = LabelEncoder() + y_enc = le.fit_transform(y) + + trained = model_obj.fit(X, y_enc, params.get("train_params", {})) + + torch.save(trained.model.state_dict(), MODEL_PATH) + joblib.dump(le, ENCODER_PATH) + with open(META_PATH, "w") as f: + json.dump({"num_features": X.shape[1], "num_classes": len(le.classes_)}, f) + + preds, _ = trained.predict(X) + acc = accuracy_score(y_enc, preds) + prec = precision_score(y_enc, preds, average='weighted', zero_division=0) + rec = recall_score(y_enc, preds, average='weighted', zero_division=0) + f1 = f1_score(y_enc, preds, average='weighted', zero_division=0) + + return [Tensor("pipeline_output", np.array([1.0, acc, prec, rec, f1], dtype=np.float32))] + + elif mode == "infer": + if not os.path.exists(MODEL_PATH): + raise FileNotFoundError("No model checkpoint found") + + with open(META_PATH, "r") as f: + meta = json.load(f) + num_features = meta["num_features"] + num_classes = meta["num_classes"] + + model_obj.model = nn.Linear(num_features, num_classes) + model_obj.model.load_state_dict(torch.load(MODEL_PATH)) + model_obj.model.eval() + + preds, probs = model_obj.predict(X) + le = joblib.load(ENCODER_PATH) + labels = le.inverse_transform(preds) + + response = [] + for label, prob in zip(labels, probs): + prob_dict = {str(le.classes_[i]): float(p) for i, p in enumerate(prob)} + response.append({"label": str(label), "probabilities": prob_dict}) + + print(response, flush=True) + + return [Tensor("pipeline_output", np.array([json.dumps(response)], dtype=object))] + else: + raise ValueError(f"Unknown mode '{mode}'") + except Exception as e: + print(f"[ERROR] {e}") + return [Tensor("pipeline_output", np.array([f"ERROR: {e}"], dtype=object))] + + def finalize(self): + print("[finalize] Python node finalized", flush=True) \ No newline at end of file