
Commit ca7c03e

Trying to fix Modal deploy. Working locally at the moment. Detect isn't working in Modal.
1 parent 7eb27ab commit ca7c03e

File tree

2 files changed: +53 -18 lines


gradio_app.py

Lines changed: 49 additions & 14 deletions
@@ -177,35 +177,43 @@ def __predict_single_audio(self, audio_file):
             return f"Error: Path is not a regular file: {audio_file}. Got type: {type(audio_path)}", None, empty_df
 
         # Preprocess audio file
+        print(f"Processing audio file: {audio_file}")
         input_data = preprocess_audio(str(audio_file))
+        print("Audio preprocessing complete")
 
         if self.onnx:
             # ONNX inference
+            print("Starting ONNX inference...")
             # input_data shape: [1, audio_length] -> squeeze to [audio_length] -> reshape to [1, audio_length]
             input_tensor = input_data.squeeze(0).numpy().reshape(1, -1).astype(np.float32)
 
             # Verify input shape matches ONNX model expectations
             if self.onnx_session is None:
-                return "Error: ONNX session not initialized", None
+                print("Error: ONNX session is None")
+                return "Error: ONNX session not initialized", None, empty_df
 
+            print("Running session.run()...")
             outputs = self.onnx_session.run(
                 ['binary_logit', 'tag_logits'],
                 {'audio': input_tensor}
             )
+            print("Inference complete")
             binary_logit, _ = outputs
             # Convert numpy scalar to Python float for consistency
             ai_prob = float(1 / (1 + np.exp(-binary_logit[0, 0])))
         else:
             # PyTorch inference
+            print("Starting PyTorch inference...")
             if self.model is None:
-                return "Error: PyTorch model not initialized", None
+                return "Error: PyTorch model not initialized", None, empty_df
 
             input_tensor = input_data.to(device)
             ai_prob = predict_ai_only(self.model, input_tensor)
             # Ensure it's a Python float
             if isinstance(ai_prob, torch.Tensor):
                 ai_prob = ai_prob.item()
             ai_prob = float(ai_prob)
+            print("Inference complete")
 
         is_ai = ai_prob > self.threshold
         result = f"**AI-Generated: {'Yes' if is_ai else 'No'}**\n"
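
The ONNX branch above reduces to a single session.run call followed by a sigmoid over the binary logit. A minimal standalone sketch of that path, assuming a model.onnx with the same 'audio' input and 'binary_logit'/'tag_logits' outputs as in the diff; the waveform and threshold here are placeholders:

import numpy as np
import onnxruntime as ort

# Assumes the exported model exposes the same input/output names used in the diff.
session = ort.InferenceSession("model.onnx")

# Placeholder waveform shaped [1, audio_length], float32, as the diff reshapes it.
audio = np.zeros((1, 16000), dtype=np.float32)

binary_logit, tag_logits = session.run(
    ["binary_logit", "tag_logits"],
    {"audio": audio},
)

# Same post-processing as the diff: sigmoid on the scalar logit, then threshold.
ai_prob = float(1 / (1 + np.exp(-binary_logit[0, 0])))
is_ai = ai_prob > 0.5  # the app uses a configurable self.threshold here
print(f"AI probability: {ai_prob:.3f}, is_ai={is_ai}")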
@@ -478,6 +486,26 @@ def run_gradio(self):
             raise ImportError(f"Gradio is not available. Install with: pip install gradio. Error: {e}")
         else:
             gradio_module = gr
+
+        # Monkey-patch Gradio's hash_file to prevent IsADirectoryError on Modal
+        # This fixes the issue where Gradio tries to hash the root directory '/'
+        try:
+            from gradio import processing_utils
+            if not hasattr(processing_utils, '_original_hash_file'):
+                processing_utils._original_hash_file = processing_utils.hash_file
+
+                def patched_hash_file(path, *args, **kwargs):
+                    if str(path) == "/" or str(path) == "/favicon.ico":
+                        return "root_bypass_hash"
+                    try:
+                        return processing_utils._original_hash_file(path, *args, **kwargs)
+                    except (IsADirectoryError, PermissionError):
+                        return "directory_bypass_hash"
+
+                processing_utils.hash_file = patched_hash_file
+                print("Applied fix for Gradio IsADirectoryError")
+        except Exception as e:
+            print(f"Could not patch Gradio: {e}")
 
         # Configure Gradio cache directory for Modal
         if self.is_modal:
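
The patch above wraps gradio.processing_utils.hash_file so that hashing '/' (which Gradio attempts when Modal requests the root path) no longer raises IsADirectoryError. A condensed sketch of the same idea, assuming a Gradio version that exposes processing_utils.hash_file as the diff does:

from gradio import processing_utils

# Save the original implementation once, then install the guard.
if not hasattr(processing_utils, "_original_hash_file"):
    processing_utils._original_hash_file = processing_utils.hash_file

    def patched_hash_file(path, *args, **kwargs):
        if str(path) in ("/", "/favicon.ico"):
            return "root_bypass_hash"  # skip paths that are not real uploads
        try:
            return processing_utils._original_hash_file(path, *args, **kwargs)
        except (IsADirectoryError, PermissionError):
            return "directory_bypass_hash"  # never let hashing crash the request

    processing_utils.hash_file = patched_hash_file

# Hashing the root directory now returns a marker instead of raising.
assert processing_utils.hash_file("/") == "root_bypass_hash"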
@@ -590,7 +618,8 @@ def safe_predict(audio_file):
                     folder_input = gradio_module.File(
                         label="Upload ZIP File",
                         file_count="single",
-                        file_types=[".zip"]
+                        file_types=[".zip"],
+                        value=None  # Explicitly set to None
                     )
                     batch_predict_btn = gradio_module.Button("Process ZIP File", variant="primary", size="lg")
 
@@ -614,7 +643,9 @@ def safe_predict(audio_file):
 
                 with gradio_module.Row():
                     result_file_download = gradio_module.File(
-                        label="Download Full Results (CSV/Excel)"
+                        label="Download Full Results (CSV/Excel)",
+                        value=None,  # Explicitly set to None
+                        interactive=False  # Output only
                     )
 
                 batch_predict_btn.click(
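
Both File components are created with value=None so there is no initial path for Gradio to hash on page load, and the results component is marked interactive=False because it is output-only. A small self-contained sketch of the same component wiring; the process_zip handler is a placeholder, not the app's real batch function:

import gradio as gr

def process_zip(zip_file):
    # Placeholder for the app's real ZIP-processing handler:
    # echo the uploaded path back as the downloadable "results" file.
    return zip_file

with gr.Blocks() as demo:
    folder_input = gr.File(
        label="Upload ZIP File",
        file_count="single",
        file_types=[".zip"],
        value=None,           # no initial file, so nothing is hashed on load
    )
    batch_predict_btn = gr.Button("Process ZIP File", variant="primary")
    result_file_download = gr.File(
        label="Download Full Results (CSV/Excel)",
        value=None,
        interactive=False,    # output-only component
    )
    batch_predict_btn.click(process_zip, inputs=folder_input, outputs=result_file_download)

if __name__ == "__main__":
    demo.launch()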
@@ -644,10 +675,12 @@ def safe_predict(audio_file):
         if not hasattr(demo, '__call__'):
             raise RuntimeError("Gradio Blocks object is not properly initialized as an ASGI app")
 
-        # Set max_file_size attribute if it doesn't exist (for file uploads)
+        # Set max_file_size attribute manually since it's not a constructor arg but required by upload route
+        # Use a large value (e.g., 1GB) or check if None works (often means unlimited)
+        # The error "AttributeError: 'Blocks' object has no attribute 'max_file_size'" confirms this is needed
         if not hasattr(demo, 'max_file_size'):
-            demo.max_file_size = 100  # 100MB default
-
+            demo.max_file_size = 1024 * 1024 * 1024  # 1 GB limit
+
         # Set root_path to empty string for Modal to prevent '/' path issues
         # This prevents Gradio from trying to process '/' as a file path on page load
         if hasattr(demo, 'root_path'):
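
The max_file_size guard exists because the upload route in the installed Gradio build reads Blocks.max_file_size even though it is not a Blocks constructor argument (newer Gradio releases accept a max_file_size argument at launch/mount time instead). A short sketch of the same defensive assignment; the UI content is a placeholder:

import gradio as gr

with gr.Blocks() as demo:
    gr.Markdown("placeholder UI")

# Some Gradio builds do not define this attribute even though the upload
# route reads it; set it manually, mirroring the diff (value is in bytes).
if not hasattr(demo, "max_file_size"):
    demo.max_file_size = 1024 * 1024 * 1024  # 1 GB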
@@ -756,7 +789,8 @@ def create_summary_visualizations(df):
 @app.function(
     image=image,
     volumes={"/models": model_volume},
-    timeout=300,
+    timeout=600,  # Increased timeout to 10 minutes
+    container_idle_timeout=300,  # Keep container alive for 5 minutes
 )
 @modal.concurrent(max_inputs=100)
 @modal.asgi_app()
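
The decorator stack follows Modal's usual pattern for serving an ASGI app from a function. A stripped-down sketch with placeholder image and volume definitions (the real ones are built earlier in gradio_app.py):

import modal

app = modal.App("ai-audio-detector")  # placeholder app name
image = modal.Image.debian_slim().pip_install("gradio", "onnxruntime")
model_volume = modal.Volume.from_name("models", create_if_missing=True)  # placeholder volume name

@app.function(
    image=image,
    volumes={"/models": model_volume},
    timeout=600,                  # allow up to 10 minutes per request
    container_idle_timeout=300,   # keep the container warm for 5 minutes
)
@modal.concurrent(max_inputs=100)
@modal.asgi_app()
def gradio_app_modal():
    import gradio as gr

    with gr.Blocks() as demo:
        gr.Markdown("placeholder UI")

    # Mirror the diff's guard: prefer the underlying FastAPI app if present.
    return demo.app if hasattr(demo, "app") else demo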
@@ -804,12 +838,13 @@ def gradio_app_modal():
     except Exception:
         pass  # If setting fails, continue anyway - not critical
 
-    # Return the Blocks object directly - it's the ASGI app
-    # Don't return demo.app as it breaks internal Gradio functionality (like max_file_size)
-    if not callable(demo):
-        raise RuntimeError(f"Gradio Blocks object is not callable: {type(demo)}")
-
-    return demo
+    # Return the underlying FastAPI app which is ASGI compatible
+    if hasattr(demo, "app"):
+        return demo.app
+    elif callable(demo):
+        return demo
+    else:
+        raise RuntimeError(f"Expected an ASGI app, but got {type(demo)}")
 
 
 if __name__ == "__main__":
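
Returning demo.app hands Modal the underlying FastAPI application rather than the Blocks object. A related pattern, shown here only as a hedged alternative (not what this commit does), is to mount the Blocks onto an explicit FastAPI app with gr.mount_gradio_app:

import gradio as gr
from fastapi import FastAPI

def build_asgi_app():
    with gr.Blocks() as demo:
        gr.Markdown("placeholder UI")

    # Mount the Gradio Blocks onto an explicit FastAPI app at the root path
    # and return that app to the ASGI server.
    fastapi_app = FastAPI()
    return gr.mount_gradio_app(fastapi_app, demo, path="/")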

scripts/setup_modal.py

Lines changed: 4 additions & 4 deletions
@@ -150,16 +150,16 @@ def main():
     if not args.skip_upload:
         upload_success = upload_to_modal(onnx_path, args.volume)
         if upload_success:
-            print("\n✅ Setup complete! You can now deploy to Modal with:")
+            print("\nSetup complete! You can now deploy to Modal with:")
             print(" modal deploy gradio_app.py")
         else:
-            print("\n✅ ONNX conversion complete!")
-            print(" ⚠️ Upload to Modal failed. Please authenticate and upload manually:")
+            print("\nONNX conversion complete!")
+            print(" Upload to Modal failed. Please authenticate and upload manually:")
             print(f" modal volume put {args.volume} {onnx_path} model.onnx")
             print(" Then deploy with:")
             print(" modal deploy gradio_app.py")
     else:
-        print("\n✅ ONNX conversion complete!")
+        print("\nONNX conversion complete!")
         print(f" Upload manually with: modal volume put {args.volume} {onnx_path} model.onnx")
 
 if __name__ == "__main__":
