Skip to content

Commit 8a8c98d

Browse files
Add fastapi endpoint
1 parent 394df9e commit 8a8c98d

File tree

13 files changed

+1013
-1
lines changed

13 files changed

+1013
-1
lines changed

README.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -197,6 +197,7 @@ using synthetic data that demonstrate BlueCast's full feature set:
197197
| [07_fairness.py](examples/07_fairness.py) | Fairness auditing, demographic parity, equalized odds, conformal fairness |
198198
| [08_eda.py](examples/08_eda.py) | Univariate/bivariate plots, PCA, t-SNE, correlations, data quality, leakage detection |
199199
| [09_bluecast_ai.py](examples/09_bluecast_ai.py) | Multi-agent LLM-powered AutoML (requires API key) |
200+
| [10_serving.py](examples/10_serving.py) | Deploy models as REST APIs, export Dockerfile |
200201

201202
### Kaggle competition results
202203

bluecast/serve/__init__.py

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
"""
2+
BlueCast Serve: One-command API deployment for trained pipelines.
3+
4+
Optional module -- install dependencies with:
5+
pip install bluecast[serve]
6+
7+
Usage::
8+
9+
from bluecast.serve import serve, export_api
10+
11+
# Quick local server
12+
serve(automl, port=8080)
13+
14+
# Export standalone deployment
15+
export_api(automl, output_dir="./deployment")
16+
"""
17+
18+
import logging
19+
from typing import Any
20+
21+
logger = logging.getLogger(__name__)
22+
23+
24+
def serve(
    pipeline: Any,
    host: str = "0.0.0.0",
    port: int = 8080,
    log_level: str = "info",
) -> None:
    """Start a FastAPI server to serve predictions from a trained pipeline.

    Blocks until the server is stopped (uvicorn runs in the foreground).

    :param pipeline: A trained BlueCast pipeline (any variant).
    :param host: Host address to bind to.
    :param port: Port to listen on.
    :param log_level: Uvicorn log level (e.g. "info", "debug").
    :raises ImportError: If the optional serving dependencies are missing.
    """
    try:
        import uvicorn
    except ImportError as err:
        # Chain the original error so the real import failure stays visible.
        raise ImportError(
            "uvicorn is required to run the server. "
            "Install with: pip install 'bluecast[serve]'"
        ) from err

    # Imported lazily so `bluecast.serve` stays importable without fastapi.
    from bluecast.serve.app import create_app

    app = create_app(pipeline)

    print(f"Starting BlueCast Model API on http://{host}:{port}")
    print(f"Swagger docs: http://{host}:{port}/docs")
    print(f"Health check: http://{host}:{port}/health")

    uvicorn.run(app, host=host, port=port, log_level=log_level)
54+
55+
56+
def export_api(
    pipeline: Any,
    output_dir: str,
    include_docker: bool = True,
) -> str:
    """Export a trained pipeline as a standalone FastAPI deployment.

    Writes a directory containing ``app.py``, ``model.pkl``,
    ``requirements.txt``, ``README.md`` and (optionally) a ``Dockerfile``.
    The generated app is self-contained: at runtime it only needs
    ``bluecast`` core, not ``bluecast.serve``.

    :param pipeline: A trained BlueCast pipeline (any variant).
    :param output_dir: Path to the output directory.
    :param include_docker: Whether to generate a Dockerfile.
    :returns: Absolute path to the output directory.
    """
    # Lazily import and delegate to the exporter implementation.
    from bluecast.serve.exporter import export_api as exporter_impl

    return exporter_impl(pipeline, output_dir, include_docker=include_docker)

bluecast/serve/app.py

Lines changed: 146 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,146 @@
1+
"""FastAPI app factory for serving BlueCast pipelines."""
2+
3+
import logging
4+
import traceback
5+
from typing import Any, Dict, List
6+
7+
import numpy as np
8+
import pandas as pd
9+
10+
from bluecast.serve.schemas import (
11+
_get_class_problem,
12+
_has_conformal,
13+
build_request_model,
14+
build_schema_response,
15+
)
16+
17+
logger = logging.getLogger(__name__)
18+
19+
20+
def _format_prediction(raw_result: Any, class_problem: str) -> Dict[str, Any]:
21+
"""Format a single prediction result into a JSON-friendly dict."""
22+
if class_problem in ("binary", "multiclass"):
23+
if isinstance(raw_result, tuple) and len(raw_result) == 2:
24+
probs, classes = raw_result
25+
prob_val = probs.tolist() if hasattr(probs, "tolist") else probs
26+
class_val = classes.tolist() if hasattr(classes, "tolist") else classes
27+
if isinstance(prob_val, list) and len(prob_val) == 1:
28+
prob_val = prob_val[0]
29+
if isinstance(class_val, list) and len(class_val) == 1:
30+
class_val = class_val[0]
31+
return {"probabilities": prob_val, "predicted_class": class_val}
32+
else:
33+
val = raw_result.tolist() if hasattr(raw_result, "tolist") else raw_result
34+
return {"prediction": val}
35+
else:
36+
if isinstance(raw_result, (np.ndarray, pd.Series)):
37+
val = raw_result.tolist()
38+
if isinstance(val, list) and len(val) == 1:
39+
val = val[0]
40+
return {"prediction": val}
41+
return {"prediction": raw_result}
42+
43+
44+
def _format_batch_prediction(raw_result: Any, class_problem: str) -> Dict[str, Any]:
45+
"""Format batch predictions."""
46+
if class_problem in ("binary", "multiclass"):
47+
if isinstance(raw_result, tuple) and len(raw_result) == 2:
48+
probs, classes = raw_result
49+
return {
50+
"probabilities": probs.tolist() if hasattr(probs, "tolist") else list(probs),
51+
"predicted_classes": (
52+
classes.tolist() if hasattr(classes, "tolist") else list(classes)
53+
),
54+
"count": len(probs) if hasattr(probs, "__len__") else 1,
55+
}
56+
if isinstance(raw_result, (np.ndarray, pd.Series)):
57+
return {"predictions": raw_result.tolist(), "count": len(raw_result)}
58+
return {"predictions": raw_result}
59+
60+
61+
def create_app(pipeline: Any) -> Any:
    """Create a FastAPI application from a trained BlueCast pipeline.

    The app auto-generates request schemas from the pipeline's column metadata
    and provides prediction, health, schema, and metrics endpoints.

    :param pipeline: A trained BlueCast pipeline (any variant).
    :returns: A FastAPI application instance.
    :raises ImportError: If fastapi is not installed.
    """
    try:
        from fastapi import FastAPI, HTTPException
    except ImportError as err:
        # Chain the original error so the real import failure stays visible.
        raise ImportError(
            "FastAPI is required for bluecast.serve. "
            "Install with: pip install 'bluecast[serve]'"
        ) from err

    class_problem = _get_class_problem(pipeline)
    has_conformal = _has_conformal(pipeline)

    RequestModel = build_request_model(pipeline)

    app = FastAPI(
        title="BlueCast Model API",
        description=(
            f"Auto-generated API for a BlueCast {class_problem} model. "
            f"Conformal prediction: {'enabled' if has_conformal else 'disabled'}."
        ),
        version="1.0.0",
    )

    @app.get("/health")
    def health() -> Dict[str, Any]:
        # Lightweight liveness probe that also reports model metadata.
        return {
            "status": "healthy",
            "model_type": class_problem,
            "conformal_prediction": has_conformal,
        }

    @app.get("/schema")
    def schema() -> Dict[str, Any]:
        return build_schema_response(pipeline)

    @app.get("/metrics")
    def metrics() -> Dict[str, Any]:
        eval_metrics = getattr(pipeline, "eval_metrics", None)
        # Wrapper pipelines expose the fitted model on `_inner`; prefer its
        # metrics when present.
        if hasattr(pipeline, "_inner"):
            eval_metrics = getattr(pipeline._inner, "eval_metrics", eval_metrics)
        if eval_metrics is None:
            return {"message": "No evaluation metrics available. Use fit_eval() to generate."}
        # Only pass through values the JSON encoder can handle.
        serializable: Dict[str, Any] = {}
        for k, v in eval_metrics.items():
            if isinstance(v, (int, float, str, bool)):
                serializable[k] = v
            elif isinstance(v, (np.floating, np.integer)):
                serializable[k] = float(v)
        return serializable

    @app.post("/predict")
    def predict(request: RequestModel) -> Dict[str, Any]:  # type: ignore[valid-type]
        try:
            data = request.model_dump()
            data = {k: v for k, v in data.items() if k != "_placeholder"}
            df = pd.DataFrame([data])
            result = pipeline.predict(df)
            return _format_prediction(result, class_problem)
        except Exception as e:
            # logger.exception records the traceback automatically; chaining
            # keeps the original cause attached to the HTTP error.
            logger.exception("Prediction failed: %s", e)
            raise HTTPException(status_code=400, detail=str(e)) from e

    @app.post("/predict/batch")
    def predict_batch(requests: List[RequestModel]) -> Dict[str, Any]:  # type: ignore[valid-type]
        try:
            data_list = []
            for req in requests:
                d = req.model_dump()
                d = {k: v for k, v in d.items() if k != "_placeholder"}
                data_list.append(d)
            df = pd.DataFrame(data_list)
            result = pipeline.predict(df)
            return _format_batch_prediction(result, class_problem)
        except Exception as e:
            logger.exception("Batch prediction failed: %s", e)
            raise HTTPException(status_code=400, detail=str(e)) from e

    return app

bluecast/serve/exporter.py

Lines changed: 94 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,94 @@
1+
"""Export a trained BlueCast pipeline as a standalone deployment directory."""
2+
3+
import json
4+
import logging
5+
import os
6+
from typing import Any
7+
8+
from bluecast.general_utils.general_utils import save_to_production
9+
from bluecast.serve.schemas import _extract_column_info, _get_class_problem
10+
11+
logger = logging.getLogger(__name__)
12+
13+
14+
def export_api(
    pipeline: Any,
    output_dir: str,
    include_docker: bool = True,
) -> str:
    """Export a trained pipeline as a self-contained FastAPI deployment.

    Creates a directory with a standalone ``app.py``, serialized model,
    ``requirements.txt``, optional ``Dockerfile``, and a ``README.md``.
    The generated app does NOT depend on ``bluecast.serve`` at runtime —
    only on ``bluecast`` core, ``fastapi``, and ``uvicorn``.

    :param pipeline: A trained BlueCast pipeline (any variant).
    :param output_dir: Path to the output directory (created if needed).
    :param include_docker: Whether to generate a Dockerfile.
    :returns: The absolute path to the output directory.
    :raises ImportError: If jinja2 is not installed.
    """
    try:
        from jinja2 import Environment, PackageLoader
    except ImportError as err:
        # Chain the original error so the real import failure stays visible.
        raise ImportError(
            "jinja2 is required for export_api. "
            "It should already be installed as a BlueCast dependency."
        ) from err

    os.makedirs(output_dir, exist_ok=True)
    abs_output = os.path.abspath(output_dir)

    columns = _extract_column_info(pipeline)
    class_problem = _get_class_problem(pipeline)

    # Serialize the pipeline (save_to_production appends file_type to path).
    model_base = os.path.join(abs_output, "model")
    save_to_production(pipeline, model_base, file_type=".pkl")
    logger.info("Model saved to %s.pkl", model_base)

    env = Environment(
        loader=PackageLoader("bluecast.serve", "templates"),
        keep_trailing_newline=True,
    )

    # Render app.py from the standalone template.
    app_template = env.get_template("app_standalone.py.j2")
    columns_json = json.dumps(columns, indent=4)
    app_code = app_template.render(
        columns=columns,
        columns_json=columns_json,
        class_problem=class_problem,
    )
    _write(abs_output, "app.py", app_code)

    # Render requirements.txt (pinned minimum versions for the runtime).
    requirements = "bluecast>=3.0.0\nfastapi>=0.100.0\nuvicorn>=0.20.0\ndill>=0.3.3\n"
    _write(abs_output, "requirements.txt", requirements)

    # Render Dockerfile (optional).
    if include_docker:
        dockerfile_template = env.get_template("Dockerfile.j2")
        _write(abs_output, "Dockerfile", dockerfile_template.render())

    # Render README with a small example payload built from the first columns.
    example_payload = json.dumps(
        {col["name"]: 0.0 if col["python_type"] == "float" else "example" for col in columns[:5]},
        indent=2,
    )
    readme_template = env.get_template("README.md.j2")
    readme = readme_template.render(
        class_problem=class_problem,
        example_payload=example_payload,
    )
    _write(abs_output, "README.md", readme)

    logger.info("Deployment exported to %s", abs_output)
    return abs_output
88+
89+
90+
def _write(directory: str, filename: str, content: str) -> None:
    """Write ``content`` to ``directory/filename`` and log the written path.

    :param directory: Target directory (assumed to exist).
    :param filename: Name of the file to create or overwrite.
    :param content: Text content to write.
    """
    path = os.path.join(directory, filename)
    # Pin the encoding so exported deployments are identical across
    # platforms (the default text encoding is locale-dependent).
    with open(path, "w", encoding="utf-8") as f:
        f.write(content)
    logger.info(" Written: %s", path)

0 commit comments

Comments
 (0)