
Commit 2ff4b57

feat:custom dataset
1 parent 84c53c2 commit 2ff4b57

File tree: 10 files changed (+155, -62 lines)


README.md

Lines changed: 1 addition & 2 deletions
```diff
@@ -21,7 +21,7 @@ LMeterX is a professional large language model performance testing platform that
 - **Universal compatibility**: Supports any language/multimodal/CoT model (GPT/Claude/Llama, etc.) service API stress testing
 - **Smart load testing**: Precise concurrency control & Real user simulation
 - **Professional metrics**: TTFT, TPS, RPS, success/error rate, etc
-- **Multi-scenario support**: Text conversations & Multimodal (image+text)
+- **Multi-scenario support**: Text conversations & Multimodal (image+text) & Custom dataset
 - **Visualize the results**: Performance reports & Model arena
 - **Real-time monitoring**: Hierarchical monitoring of tasks and services
 - **Enterprise ready**: Docker deployment & Web management console & Scalable architecture
@@ -131,7 +131,6 @@ LMeterX/
 ## 🗺️ Development Roadmap

 ### In Development
-- [ ] Support for user-defined load test datasets
 - [ ] Support for client resource monitoring

 ### Planned
```

README_CN.md

Lines changed: 1 addition & 2 deletions
```diff
@@ -21,7 +21,7 @@ LMeterX 是一个专业的大语言模型性能测试平台,支持对 LLM 服
 - **通用兼容性**:支持任何语言/多模态/CoT模型(GPT/Claude/Llama等)服务API压测
 - **智能负载测试**:精确并发控制 & 真实用户模拟
 - **专业性能指标**:TTFT、TPS、RPS、成功/错误率等
-- **多场景支持**:文本对话 & 多模态(图像+文本)
+- **多场景支持**:文本对话 & 多模态(图像+文本)& 自定义数据集
 - **可视化结果**:性能报告一键生成 & 多维度模型对比
 - **实时监控**:任务和服务的分层监控
 - **企业就绪**: Docker部署 & Web管理控制台 &可扩展架构
@@ -131,7 +131,6 @@ LMeterX 采用现代化的技术栈,确保系统的可靠性和可维护性:
 ## 🗺️ 发展路线图

 ### 开发中
-- [ ] 支持用户自定义压测数据集
 - [ ] 支持客户端资源监控

 ### 规划中
```

backend/service/task_service.py

Lines changed: 70 additions & 30 deletions
```diff
@@ -32,6 +32,64 @@
 from utils.logger import logger


+def _normalize_file_path(file_path: str) -> str:
+    """
+    Normalize file path to ensure cross-service compatibility.
+    Converts various absolute path formats to relative paths.
+
+    Args:
+        file_path: The file path to normalize
+
+    Returns:
+        The normalized relative path
+    """
+    if not file_path or file_path.strip() == "":
+        return ""
+
+    # Convert various absolute path formats to relative paths
+    if file_path.startswith(UPLOAD_FOLDER + "/"):
+        return file_path.replace(UPLOAD_FOLDER + "/", "")
+    elif file_path.startswith("/app/upload_files/"):
+        # For backward compatibility with existing Docker paths
+        return file_path.replace("/app/upload_files/", "")
+    elif file_path.startswith("/upload_files/"):
+        # Handle paths starting with /upload_files/
+        return file_path[len("/upload_files/") :]
+
+    return file_path
+
+
+def _get_cert_config(body: TaskCreateReq) -> Tuple[str, str]:
+    """
+    Get and normalize certificate configuration from the request body.
+
+    Args:
+        body: The task creation request body
+
+    Returns:
+        A tuple of (cert_file, key_file) normalized paths
+    """
+    cert_file = ""
+    key_file = ""
+
+    if body.cert_config:
+        cert_file = body.cert_config.cert_file or ""
+        key_file = body.cert_config.key_file or ""
+    else:
+        # Try to get certificate configuration from upload service
+        from service.upload_service import get_task_cert_config
+
+        cert_config = get_task_cert_config(body.temp_task_id)
+        cert_file = cert_config.get("cert_file", "")
+        key_file = cert_config.get("key_file", "")
+
+    # Normalize paths
+    cert_file = _normalize_file_path(cert_file)
+    key_file = _normalize_file_path(key_file)
+
+    return cert_file, key_file
+
+
 async def get_tasks_svc(
     request: Request,
     page: int = Query(1, ge=1, alias="page"),
@@ -251,35 +309,7 @@ async def create_task_svc(request: Request, body: TaskCreateReq):
     task_id = str(uuid.uuid4())
     logger.info(f"Creating task '{body.name}' with ID: {task_id}")

-    cert_file = ""
-    key_file = ""
-    if body.cert_config:
-        cert_file = body.cert_config.cert_file or ""
-        key_file = body.cert_config.key_file or ""
-    else:
-        # Try to get certificate configuration from upload service
-        from service.upload_service import get_task_cert_config
-
-        cert_config = get_task_cert_config(body.temp_task_id)
-        cert_file = cert_config.get("cert_file", "")
-        key_file = cert_config.get("key_file", "")
-
-    # Convert absolute paths to relative paths for cross-service compatibility
-    if cert_file:
-        # Convert backend upload path to relative path that st_engine can access
-        if cert_file.startswith(UPLOAD_FOLDER + "/"):
-            cert_file = cert_file.replace(UPLOAD_FOLDER + "/", "")
-        elif cert_file.startswith("/app/upload_files/"):
-            # For backward compatibility with existing Docker paths
-            cert_file = cert_file.replace("/app/upload_files/", "")
-
-    if key_file:
-        # Convert backend upload path to relative path that st_engine can access
-        if key_file.startswith(UPLOAD_FOLDER + "/"):
-            key_file = key_file.replace(UPLOAD_FOLDER + "/", "")
-        elif key_file.startswith("/app/upload_files/"):
-            # For backward compatibility with existing Docker paths
-            key_file = key_file.replace("/app/upload_files/", "")
+    cert_file, key_file = _get_cert_config(body)

     # Convert headers from a list of objects to a dictionary, then to a JSON string.
     headers = {
@@ -297,6 +327,16 @@ async def create_task_svc(request: Request, body: TaskCreateReq):
     }
     cookies_json = json.dumps(cookies)

+    # Normalize test_data path to ensure cross-service compatibility
+    test_data = body.test_data or ""
+    if (
+        test_data
+        and not test_data.strip().lower() in ("", "default")
+        and not test_data.strip().startswith("{")
+    ):
+        # If test_data is a file path, convert it to relative path
+        test_data = _normalize_file_path(test_data)
+
     db = request.state.db
     try:
         # Convert field_mapping to JSON string if provided
@@ -325,7 +365,7 @@ async def create_task_svc(request: Request, body: TaskCreateReq):
             api_path=body.api_path,
             request_payload=body.request_payload,
             field_mapping=field_mapping_json,
-            test_data=body.test_data,
+            test_data=test_data,
         )

         db.add(new_task)
```
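For orientation, here is a minimal usage sketch of the new `_normalize_file_path` helper (not part of the commit). It assumes the backend package is on the import path, that `UPLOAD_FOLDER` resolves to `/app/upload_files`, and that the `task-123` paths are made up:

```python
# Illustrative only: assumes the backend package is importable (service.*) and that
# UPLOAD_FOLDER resolves to "/app/upload_files"; the task-123 paths are hypothetical.
from service.task_service import _normalize_file_path

samples = [
    "/app/upload_files/task-123/custom_dataset.jsonl",  # backend absolute path
    "/upload_files/task-123/custom_dataset.jsonl",      # leading /upload_files/ prefix
    "task-123/custom_dataset.jsonl",                    # already relative
    "",                                                 # empty input
]
for path in samples:
    print(repr(path), "->", repr(_normalize_file_path(path)))
# Expected: every non-empty sample normalizes to "task-123/custom_dataset.jsonl"; "" stays "".
```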

backend/service/upload_service.py

Lines changed: 34 additions & 10 deletions
```diff
@@ -104,22 +104,42 @@ async def process_cert_files(
     for file in files:
         if file and file.filename and allowed_file(file.filename, "cert"):
             filename = secure_filename(file.filename)
-            file_path = os.path.join(task_upload_dir, filename)
-            with open(file_path, "wb") as f:
+            absolute_file_path = os.path.join(task_upload_dir, filename)
+            with open(absolute_file_path, "wb") as f:
                 f.write(await file.read())

             file_info = {
                 "originalname": filename,
-                "path": file_path,
-                "size": os.path.getsize(file_path),
+                "path": absolute_file_path,  # Keep absolute path for file info
+                "size": os.path.getsize(absolute_file_path),
             }
             uploaded_files_info.append(file_info)
             logger.info(f"Certificate file uploaded successfully, type: {cert_type}")

     # Retrieve existing config for the task, if any.
     existing_config = get_task_cert_config(task_id)
-    # Determine the new config based on the uploaded files.
-    cert_config = determine_cert_config(uploaded_files_info, cert_type, existing_config)
+    # Determine the new config based on the uploaded files - use relative paths
+    uploaded_files_with_relative_paths = []
+    for file_info in uploaded_files_info:
+        # Convert absolute path to relative path for cert config
+        abs_path = str(file_info["path"])
+        if abs_path.startswith(UPLOAD_FOLDER + "/"):
+            relative_path = abs_path.replace(UPLOAD_FOLDER + "/", "")
+        else:
+            # Fallback: extract relative part
+            relative_path = os.path.join(task_id, os.path.basename(abs_path))
+
+        uploaded_files_with_relative_paths.append(
+            {
+                "originalname": file_info["originalname"],
+                "path": relative_path,  # Use relative path for config
+                "size": file_info["size"],
+            }
+        )
+
+    cert_config = determine_cert_config(
+        uploaded_files_with_relative_paths, cert_type, existing_config
+    )
     # Save the updated configuration.
     save_task_cert_config(task_id, cert_config)

@@ -146,16 +166,20 @@ async def process_dataset_files(task_id: str, files: List[UploadFile]):
     for file in files:
         if file and file.filename and allowed_file(file.filename, "dataset"):
             filename = secure_filename(file.filename)
-            file_path = os.path.join(task_upload_dir, filename)
-            with open(file_path, "wb") as f:
+            absolute_file_path = os.path.join(task_upload_dir, filename)
+            with open(absolute_file_path, "wb") as f:
                 f.write(await file.read())

+            # Return relative path from upload folder for cross-service compatibility
+            relative_file_path = os.path.join(task_id, filename)
+
             file_info = {
                 "originalname": filename,
-                "path": file_path,
-                "size": os.path.getsize(file_path),
+                "path": absolute_file_path,  # Keep absolute path for file info
+                "size": os.path.getsize(absolute_file_path),
             }
             uploaded_files_info.append(file_info)
+            file_path = relative_file_path  # Use relative path for test_data
             logger.info(f"Dataset file uploaded successfully: {filename}")

     return uploaded_files_info, file_path
```
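The net effect of `process_dataset_files` is that the bytes land under the absolute upload directory while callers receive a `<task_id>/<filename>` path relative to the upload root, which is what ends up in `test_data`. A small sketch of that convention; the task id and filename are hypothetical, and `/app/upload_files` is the assumed Docker value of `UPLOAD_FOLDER`:

```python
import os

UPLOAD_FOLDER = "/app/upload_files"  # assumed Docker value of the upload root
task_id = "task-123"                 # hypothetical task id
filename = "custom_dataset.jsonl"    # hypothetical uploaded filename

absolute_file_path = os.path.join(UPLOAD_FOLDER, task_id, filename)  # where the bytes are written
relative_file_path = os.path.join(task_id, filename)                 # what gets reported as test_data

print(absolute_file_path)  # /app/upload_files/task-123/custom_dataset.jsonl
print(relative_file_path)  # task-123/custom_dataset.jsonl
```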

backend/utils/be_config.py

Lines changed: 7 additions & 1 deletion
```diff
@@ -16,4 +16,10 @@
 LOG_TASK_DIR = os.path.join(LOG_DIR, "task")

 # Directory for storing uploaded files
-UPLOAD_FOLDER = os.path.join(BASE_DIR, "upload_files")
+# Handle different environments: Docker vs local development
+if os.path.exists("/app") and os.getcwd().startswith("/app"):
+    # Docker environment: use /app/upload_files (matches docker-compose volume mapping)
+    UPLOAD_FOLDER = "/app/upload_files"
+else:
+    # Local development: use project root upload_files
+    UPLOAD_FOLDER = os.path.join(BASE_DIR, "upload_files")
```
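The backend now picks its upload root at import time based on the runtime environment; the same check is mirrored in `st_engine/utils/config.py` further down. A quick diagnostic sketch (not part of the commit) for confirming which branch a given environment takes; `os.getcwd()` stands in for the project `BASE_DIR` here:

```python
import os

# Same predicate as in be_config.py: Docker images run the app under /app.
in_docker = os.path.exists("/app") and os.getcwd().startswith("/app")
upload_folder = "/app/upload_files" if in_docker else os.path.join(os.getcwd(), "upload_files")
print(f"detected {'Docker' if in_docker else 'local'} environment, UPLOAD_FOLDER -> {upload_folder}")
```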

docker-compose.dev.yml

Lines changed: 3 additions & 6 deletions
```diff
@@ -40,7 +40,7 @@ services:
       - FLASK_DEBUG=false
     volumes:
       - ./logs:/logs
-      - upload_files:/app/upload_files
+      - ./upload_files:/app/upload_files
     ports:
       - "5001:5001"
     healthcheck:
@@ -66,11 +66,9 @@
       - DB_USER=root
       - DB_PASSWORD=lmeterx123
       - DB_NAME=lmeterx
-      - SECRET_KEY=your_secret_key_here
-      - FLASK_DEBUG=false
     volumes:
       - ./logs:/logs
-      - upload_files:/app/upload_files
+      - ./upload_files:/app/upload_files
     ports:
       - "5002:5002"
     healthcheck:
@@ -102,10 +100,9 @@
     volumes:
       - ./frontend/nginx.conf:/etc/nginx/conf.d/default.conf
       - ./frontend/nginx-map.conf:/etc/nginx/conf.d/map.conf
-      - upload_files:/usr/share/nginx/html/uploads
+      - ./upload_files:/usr/share/nginx/html/uploads
 volumes:
   mysql_data:
-  upload_files:

 networks:
   default:
```

docker-compose.yml

Lines changed: 3 additions & 4 deletions
```diff
@@ -34,7 +34,7 @@ services:
       - DB_NAME=lmeterx
     volumes:
       - ./logs:/logs
-      - upload_files:/app/upload_files
+      - ./upload_files:/app/upload_files
     ports:
       - "5001:5001"
     healthcheck:
@@ -60,7 +60,7 @@
       - DB_NAME=lmeterx
     volumes:
       - ./logs:/logs
-      - upload_files:/app/upload_files
+      - ./upload_files:/app/upload_files
     ports:
       - "5002:5002"
     healthcheck:
@@ -88,11 +88,10 @@
       timeout: 5s
       retries: 3
     volumes:
-      - upload_files:/usr/share/nginx/html/uploads
+      - ./upload_files:/usr/share/nginx/html/uploads

 volumes:
   mysql_data:
-  upload_files:

 networks:
   default:
```
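Both compose files swap the named `upload_files` volume for a `./upload_files` bind mount shared by the backend, the engine, and nginx. A small optional pre-flight sketch, assuming the repository root as the working directory, to create the host directory before `docker compose up` (otherwise the Docker daemon typically creates it root-owned):

```python
from pathlib import Path

# Create ./upload_files on the host so the bind mount is not created lazily by the
# Docker daemon (which would typically leave it root-owned).
upload_dir = Path("upload_files")
upload_dir.mkdir(exist_ok=True)
print(f"{upload_dir.resolve()} is ready to be bind-mounted into backend, engine, and nginx")
```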

st_engine/engine/locustfile.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -698,7 +698,7 @@ def chat_request(self):
         base_request_kwargs, user_prompt = self.request_handler.prepare_request_kwargs(
             prompt_data
         )
-        self.task_logger.info(f"base_request_kwargs: {base_request_kwargs}")
+        # self.task_logger.info(f"base_request_kwargs: {base_request_kwargs}")

         if not base_request_kwargs:
             self.task_logger.error(
```

st_engine/utils/config.py

Lines changed: 9 additions & 1 deletion
```diff
@@ -20,7 +20,15 @@
 DATA_DIR = os.path.join(ST_ENGINE_DIR, "data")
 PROMPTS_DIR = os.path.join(DATA_DIR, "prompts")
 IMAGES_DIR = os.path.join(DATA_DIR, "pic")
-UPLOAD_FOLDER = os.path.join(BASE_DIR, "upload_files")
+
+# === UPLOAD PATHS ===
+# Handle different environments: Docker vs local development
+if os.path.exists("/app") and os.getcwd().startswith("/app"):
+    # Docker environment: use /app/upload_files
+    UPLOAD_FOLDER = "/app/upload_files"
+else:
+    # Local development: use project root upload_files
+    UPLOAD_FOLDER = os.path.join(BASE_DIR, "upload_files")

 # === TASK STATUS CONSTANTS ===
 TASK_STATUS_CREATED = "created"
```

st_engine/utils/tools.py

Lines changed: 26 additions & 5 deletions
```diff
@@ -92,7 +92,16 @@ def resolve_upload_file_path(file_path: str) -> str:

     normalized_path = os.path.normpath(file_path)

-    if os.path.isabs(normalized_path):
+    # Handle special case: paths starting with /upload_files/ should be treated as relative
+    if normalized_path.startswith("/upload_files/"):
+        # Extract relative part after /upload_files/
+        relative_part = normalized_path[len("/upload_files/") :]
+        if relative_part.startswith("../"):
+            raise ValueError(
+                f"Invalid relative path (contains parent directory): {file_path}"
+            )
+        absolute_path = os.path.join(UPLOAD_FOLDER, relative_part)
+    elif os.path.isabs(normalized_path):
         # Handle absolute path - ensure it's within upload folder
         upload_folder_abs = os.path.abspath(UPLOAD_FOLDER)
         if not normalized_path.startswith(upload_folder_abs):
@@ -504,10 +513,22 @@ def init_prompt_queue(
         effective_logger.info("Processing test_data as JSONL content string")
         return init_prompt_queue_from_string(test_data, task_logger)

-    # Case 4: test_data is an existing file path
-    if os.path.exists(test_data):
-        effective_logger.info(f"Processing test_data as file path: {test_data}")
-        return init_prompt_queue_from_file(test_data, task_logger)
+    # Case 4: test_data is a file path - handle both absolute and relative paths
+    effective_logger.info(f"Processing test_data as file path: {test_data}")
+
+    # Try to resolve the path using FilePathUtils for upload files
+    try:
+        # First, try to resolve as an upload file path (handles both relative and absolute paths)
+        resolved_path = FilePathUtils.resolve_upload_file_path(test_data)
+        effective_logger.info(f"Resolved upload file path: {resolved_path}")
+        return init_prompt_queue_from_file(resolved_path, task_logger)
+    except (ValueError, FileNotFoundError) as e:
+        effective_logger.warning(f"Failed to resolve as upload file path: {e}")
+
+    # Fallback: try as direct file path for backward compatibility
+    if os.path.exists(test_data):
+        effective_logger.info(f"Using direct file path: {test_data}")
+        return init_prompt_queue_from_file(test_data, task_logger)

     # Invalid test_data provided
     raise ValueError(
```
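With these changes, the engine resolves `test_data` first through `FilePathUtils.resolve_upload_file_path` (relative paths, `/upload_files/...` prefixes, and absolute paths inside the upload folder) and only then falls back to treating the value as a literal filesystem path. A hedged usage sketch; the import path and the `task-123` dataset path are assumptions, not taken from the commit:

```python
# Assumed import path: per this diff, resolve_upload_file_path lives on
# FilePathUtils in st_engine/utils/tools.py and is called as a class-level method.
from utils.tools import FilePathUtils

# Hypothetical value: the relative path the backend stores after a dataset upload.
test_data = "task-123/custom_dataset.jsonl"

try:
    resolved = FilePathUtils.resolve_upload_file_path(test_data)
    print("resolved:", resolved)  # e.g. /app/upload_files/task-123/custom_dataset.jsonl
except (ValueError, FileNotFoundError) as exc:
    # init_prompt_queue() falls back to os.path.exists(test_data) in this case.
    print("falling back to direct path handling:", exc)
```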

0 commit comments
