42 changes: 21 additions & 21 deletions src/endpoints/data/data_download.py
@@ -1,32 +1,32 @@
-from fastapi import APIRouter, HTTPException
-from pydantic import BaseModel
-from typing import List, Any, Optional
 import logging
 
-router = APIRouter()
-logger = logging.getLogger(__name__)
-
-
-class RowMatcher(BaseModel):
-    columnName: str
-    operation: str
-    values: List[Any]
+import pandas as pd
+from fastapi import APIRouter, HTTPException
+
+from src.service.utils.download import (
+    DataRequestPayload,
+    DataResponsePayload,
+    apply_filters,  # ← New utility function
+    load_model_dataframe,
+)
 
-class DataRequestPayload(BaseModel):
-    modelId: str
-    matchAny: Optional[List[RowMatcher]] = None
-    matchAll: Optional[List[RowMatcher]] = None
-    matchNone: Optional[List[RowMatcher]] = None
+router = APIRouter()
+logger = logging.getLogger(__name__)
 
+
 @router.post("/data/download")
-async def download_data(payload: DataRequestPayload):
-    """Download model data."""
+async def download_data(payload: DataRequestPayload) -> DataResponsePayload:
+    """Download model data with filtering."""
     try:
         logger.info(f"Received data download request for model: {payload.modelId}")
-        # TODO: Implement
-        return {"status": "success", "data": []}
+        df = await load_model_dataframe(payload.modelId)
+        if df.empty:
+            return DataResponsePayload(dataCSV="")
+        df = apply_filters(df, payload)
+        csv_data = df.to_csv(index=False)
+        return DataResponsePayload(dataCSV=csv_data)
     except HTTPException:
         raise
     except Exception as e:
         logger.error(f"Error downloading data: {str(e)}")
-        raise HTTPException(status_code=500, detail=f"Error downloading data: {str(e)}")
+        raise HTTPException(status_code=500, detail=f"Error downloading data: {str(e)}")
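
Neither load_model_dataframe nor apply_filters is shown in this PR (both are imported from src/service/utils/download), so here is a minimal sketch of how apply_filters could combine the matchAny/matchAll/matchNone groups that the old inline RowMatcher and DataRequestPayload models exposed. The isin-based matching and the AND/OR/NOT combination logic are assumptions for illustration, not the PR's actual implementation.

# Hypothetical sketch of the filtering utility the endpoint now delegates to.
# The real code lives in src/service/utils/download and may differ.
from typing import Any, List, Optional

import pandas as pd
from pydantic import BaseModel


class RowMatcher(BaseModel):
    columnName: str
    operation: str  # e.g. "EQUALS"; the supported operations are an assumption
    values: List[Any]


class DataRequestPayload(BaseModel):
    modelId: str
    matchAny: Optional[List[RowMatcher]] = None
    matchAll: Optional[List[RowMatcher]] = None
    matchNone: Optional[List[RowMatcher]] = None


class DataResponsePayload(BaseModel):
    dataCSV: str


def _mask(df: pd.DataFrame, matcher: RowMatcher) -> pd.Series:
    # Assumed semantics: a row matches when its column value is one of `values`.
    return df[matcher.columnName].isin(matcher.values)


def apply_filters(df: pd.DataFrame, payload: DataRequestPayload) -> pd.DataFrame:
    # Keep rows that satisfy every matchAll matcher, at least one matchAny
    # matcher, and none of the matchNone matchers.
    keep = pd.Series(True, index=df.index)
    for m in payload.matchAll or []:
        keep &= _mask(df, m)
    if payload.matchAny:
        any_mask = pd.Series(False, index=df.index)
        for m in payload.matchAny:
            any_mask |= _mask(df, m)
        keep &= any_mask
    for m in payload.matchNone or []:
        keep &= ~_mask(df, m)
    return df[keep]

Under these assumptions, a request body such as {"modelId": "my-model", "matchAll": [{"columnName": "label", "operation": "EQUALS", "values": [1]}]} would keep only the rows whose label column equals 1 before the result is serialized into DataResponsePayload.dataCSV.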
36 changes: 24 additions & 12 deletions src/endpoints/data/data_upload.py
@@ -1,27 +1,39 @@
+import logging
+import uuid
+from datetime import datetime
+from typing import Any, Dict, List, Optional
+
+import numpy as np
 from fastapi import APIRouter, HTTPException
 from pydantic import BaseModel
-from typing import Dict, Any
-import logging
+
+from src.service.constants import INPUT_SUFFIX, METADATA_SUFFIX, OUTPUT_SUFFIX
+from src.service.data.modelmesh_parser import ModelMeshPayloadParser
+from src.service.data.storage import get_storage_interface
+from src.service.utils.upload import process_upload_request
 
 router = APIRouter()
 logger = logging.getLogger(__name__)
 
-class ModelInferJointPayload(BaseModel):
+
+class UploadPayload(BaseModel):
     model_name: str
-    data_tag: str = None
+    data_tag: Optional[str] = None
     is_ground_truth: bool = False
     request: Dict[str, Any]
-    response: Dict[str, Any]
+    response: Optional[Dict[str, Any]] = None
 
+
 @router.post("/data/upload")
-async def upload_data(payload: ModelInferJointPayload):
-    """Upload a batch of model data to TrustyAI."""
+async def upload(payload: UploadPayload) -> Dict[str, str]:
+    """Upload model data - regular or ground truth."""
     try:
-        logger.info(f"Received data upload for model: {payload.model_name}")
-        # TODO: Implement
-        return {"status": "success", "message": "Data uploaded successfully"}
+        logger.info(f"Received upload request for model: {payload.model_name}")
+        result = await process_upload_request(payload)
+        logger.info(f"Upload completed for model: {payload.model_name}")
+        return result
     except HTTPException:
         raise
     except Exception as e:
-        logger.error(f"Error uploading data: {str(e)}")
-        raise HTTPException(status_code=500, detail=f"Error uploading data: {str(e)}")
+        logger.error(f"Unexpected error in upload endpoint for model {payload.model_name}: {str(e)}", exc_info=True)
+        raise HTTPException(500, f"Internal server error: {str(e)}")
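
process_upload_request is likewise defined outside this diff (in src/service/utils/upload), so only the request/response contract of the endpoint is visible here. The sketch below shows what a client call might look like under stated assumptions: the host and port, the KServe-v2-style tensor layout inside request and response (suggested by the ModelMeshPayloadParser import), and the exact keys of the returned dict all depend on code not shown in this PR.

# Hypothetical client call against the new /data/upload endpoint.
# Field values and the tensor layout are illustrative only.
import requests

payload = {
    "model_name": "example-model",      # example value
    "data_tag": "TRAINING",             # optional tag, example value
    "is_ground_truth": False,
    "request": {                        # assumed KServe-v2-style inference request
        "inputs": [
            {"name": "input", "shape": [2, 3], "datatype": "FP64",
             "data": [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]}
        ]
    },
    "response": {                       # assumed KServe-v2-style inference response
        "outputs": [
            {"name": "output", "shape": [2, 1], "datatype": "FP64",
             "data": [[0.1], [0.9]]}
        ]
    },
}

resp = requests.post("http://localhost:8080/data/upload", json=payload, timeout=30)
resp.raise_for_status()
print(resp.json())  # a Dict[str, str]; exact keys depend on process_upload_request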