
Commit dec0519

Increase upload file size limit & fix _normalize_prompt_field
1 parent fba0574 commit dec0519

File tree

8 files changed: +51 -12 lines changed


backend/utils/be_config.py

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@
 UPLOAD_FOLDER = os.path.join(BASE_DIR, "upload_files")
 
 # File upload security configuration
-MAX_FILE_SIZE = 10 * 1024 * 1024  # 10MB
+MAX_FILE_SIZE = 10 * 1024 * 1024 * 1024  # 10GB
 MAX_FILENAME_LENGTH = 255
 MAX_TASK_ID_LENGTH = 64
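
For context, the new limit in bytes (a quick sanity check of the arithmetic; "GB" here is used in the binary sense, i.e. GiB):

    # 10 * 1024 * 1024 * 1024 bytes is the new MAX_FILE_SIZE
    assert 10 * 1024 * 1024 * 1024 == 10_737_418_240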

backend/utils/security.py

Lines changed: 1 addition & 1 deletion
@@ -95,7 +95,7 @@ def validate_file_size(file_size: int) -> None:
     """
     if file_size > MAX_FILE_SIZE:
         raise ValueError(
-            f"File size exceeds maximum allowed size of {MAX_FILE_SIZE // (1024*1024)}MB"
+            f"File size exceeds maximum allowed size of {MAX_FILE_SIZE / (1024*1024*1024):.1f}GB"
         )
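
A minimal sketch of how the updated message renders, assuming the MAX_FILE_SIZE constant from be_config.py above (the function body is paraphrased from the diff, not the full original):

    # Sketch only: mirrors the updated constant and error-message formatting.
    MAX_FILE_SIZE = 10 * 1024 * 1024 * 1024  # 10GB, as in be_config.py

    def validate_file_size(file_size: int) -> None:
        if file_size > MAX_FILE_SIZE:
            raise ValueError(
                f"File size exceeds maximum allowed size of "
                f"{MAX_FILE_SIZE / (1024*1024*1024):.1f}GB"
            )

    validate_file_size(5 * 1024**3)    # passes: 5GB is under the limit
    # validate_file_size(11 * 1024**3) would raise:
    # ValueError: File size exceeds maximum allowed size of 10.0GB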

frontend/default.conf

Lines changed: 8 additions & 3 deletions
@@ -8,6 +8,11 @@ server {
     error_log /var/log/nginx/error.log warn;
     access_log /var/log/nginx/access.log combined if=$loggable;
 
+    # Handle large file uploads - move to server level
+    client_max_body_size 10g;
+    client_body_timeout 300s;
+    client_header_timeout 300s;
+
     # Security related headers
     add_header X-Frame-Options "SAMEORIGIN";
     add_header X-XSS-Protection "1; mode=block";
@@ -31,9 +36,9 @@ server {
         proxy_cache_bypass $http_upgrade;
 
         # Increase timeout to match backend timeout (180s + buffer)
-        proxy_connect_timeout 120s;
-        proxy_send_timeout 120s;
-        proxy_read_timeout 120s;
+        proxy_connect_timeout 300s;
+        proxy_send_timeout 300s;
+        proxy_read_timeout 300s;
     }
 
     # Block access to upload directory for security

frontend/nginx.conf

Lines changed: 1 addition & 1 deletion
@@ -39,7 +39,7 @@ http {
 
     server_tokens off;
     client_body_buffer_size 128k;
-    client_max_body_size 10m;
+    client_max_body_size 10g;
     client_header_buffer_size 1k;
     large_client_header_buffers 4 8k;

frontend/src/components/CreateJobForm.tsx

Lines changed: 2 additions & 2 deletions
@@ -480,8 +480,8 @@ const CreateJobFormContent: React.FC<CreateJobFormProps> = ({
   const handleDatasetFileUpload = async (options: any) => {
     const { file, onSuccess, onError } = options;
     try {
-      // Validate file size (10MB limit)
-      const maxSize = 10 * 1024 * 1024; // 10MB
+      // Validate file size (10GB limit)
+      const maxSize = 10 * 1024 * 1024 * 1024; // 10GB
       if (file.size > maxSize) {
         message.error(
           t('components.createJobForm.fileSizeExceedsLimitWithSize', {

frontend/src/utils/constants.ts

Lines changed: 1 addition & 1 deletion
@@ -70,7 +70,7 @@ export const FILE_UPLOAD_CONFIG = {
   KEY_FORMATS: '.key,.pem',
   COMBINED_FORMATS: '.pem',
   MAX_COUNT: 1,
-  MAX_SIZE: 10 * 1024 * 1024, // 10MB
+  MAX_SIZE: 10 * 1024 * 1024 * 1024, // 10GB
   ALLOWED_TYPES: ['.json', '.txt', '.csv'],
   CERTIFICATE_TYPES: ['.pem', '.crt', '.key'],
   IMAGE_TYPES: ['.jpg', '.jpeg', '.png', '.gif', '.webp'],

st_engine/engine/locustfile.py

Lines changed: 1 addition & 0 deletions
@@ -475,6 +475,7 @@ def chat_request(self):
         base_request_kwargs, user_prompt = self.request_handler.prepare_request_kwargs(
             prompt_data
         )
+        self.task_logger.debug(f"base_request_kwargs: {base_request_kwargs}")
         if not base_request_kwargs:
             self.task_logger.error(
                 "Failed to generate request arguments. Skipping task."

st_engine/utils/common.py

Lines changed: 36 additions & 3 deletions
@@ -159,11 +159,35 @@ def encode_image(image_path: str) -> str:
 
 # === DATA PROCESSING ===
 def _normalize_prompt_field(prompt: Any) -> str:
-    """Normalize prompt field to string."""
+    """Normalize prompt field to string.
+
+    Supports multiple input formats:
+    - String: returned as-is
+    - Simple list: first element converted to string
+    - Object with 'messages' key: JSON serialized (for chat-like formats)
+    - Other objects: JSON serialized
+    """
     if isinstance(prompt, str):
         return prompt
     elif isinstance(prompt, list) and prompt:
+        # Handle simple list format like ["prompt text"]
         return str(prompt[0])
+    elif isinstance(prompt, dict):
+        # Handle complex object formats
+        try:
+            # Special handling for chat-like formats with messages
+            if "messages" in prompt:
+                # This handles formats like {"messages": [{"role": "user", "content": "..."}]}
+                return json.dumps(prompt, ensure_ascii=False, separators=(",", ":"))
+            else:
+                # Handle other dictionary formats
+                return json.dumps(prompt, ensure_ascii=False, separators=(",", ":"))
+        except (TypeError, ValueError) as e:
+            # Fallback to string representation if JSON serialization fails
+            logger.warning(
+                f"Failed to serialize prompt object to JSON: {e}, using string representation"
+            )
+            return str(prompt)
     else:
         return ""
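
As a quick usage sketch of the updated _normalize_prompt_field (assuming json is imported and a module-level logger exists in common.py, as the function body implies), the expected return values for the supported shapes are:

    # Illustration only: expected outputs for the supported prompt shapes.
    _normalize_prompt_field("Hello")               # -> 'Hello'
    _normalize_prompt_field(["Hello", "extra"])    # -> 'Hello' (first element only)
    _normalize_prompt_field(
        {"messages": [{"role": "user", "content": "Hi"}]}
    )  # -> '{"messages":[{"role":"user","content":"Hi"}]}' (compact JSON)
    _normalize_prompt_field(None)                  # -> '' (falls through to else)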

@@ -200,9 +224,18 @@ def _parse_jsonl_line(
     prompt_id = json_obj.get("id", line_num)
 
     # Extract and normalize prompt
-    prompt = _normalize_prompt_field(json_obj.get("prompt"))
+    raw_prompt = json_obj.get("prompt")
+    prompt = _normalize_prompt_field(raw_prompt)
     if not prompt:
-        effective_logger.warning(f"Empty prompt in line {line_num}: {line}")
+        # For debugging, show the type and structure of the raw prompt
+        prompt_info = f"type: {type(raw_prompt).__name__}"
+        if isinstance(raw_prompt, dict) and "messages" in raw_prompt:
+            prompt_info += f", has {len(raw_prompt['messages'])} messages"
+        elif isinstance(raw_prompt, list):
+            prompt_info += f", list length: {len(raw_prompt)}"
+        effective_logger.warning(
+            f"Empty prompt in line {line_num} ({prompt_info}): {line}..."
+        )
         return None
 
     # Handle images
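
For context, a hedged example of the kind of JSONL record this change targets; the full _parse_jsonl_line signature is not shown in the diff, so only the normalization step is illustrated:

    import json

    # A chat-style record that previously normalized to "" and hit the warning path.
    line = '{"id": 7, "prompt": {"messages": [{"role": "user", "content": "Hi"}]}}'
    json_obj = json.loads(line)

    raw_prompt = json_obj.get("prompt")
    prompt = _normalize_prompt_field(raw_prompt)  # now a compact JSON string, not ""
    assert prompt == '{"messages":[{"role":"user","content":"Hi"}]}'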

0 commit comments