ElectricAlexis committed
Commit 8804c95 · verified · 1 Parent(s): 6790c7a

Upload app.py

Files changed (1)
1. app.py +29 -7
app.py CHANGED
@@ -462,17 +462,39 @@ with gr.Blocks(css=css) as demo:
 
 
 if __name__ == "__main__":
+    # Configure GPU/CPU handling
+    import torch
+
+    # Function to initialize CUDA safely and verify it's working
+    def is_cuda_working():
+        try:
+            if torch.cuda.is_available():
+                # Test CUDA initialization with a small operation
+                test_tensor = torch.tensor([1.0], device="cuda")
+                _ = test_tensor * 2
+                return True
+            return False
+        except Exception as e:
+            print(f"CUDA initialization test failed: {e}")
+            return False
+
     # Check if running on Hugging Face Spaces
     if "SPACE_ID" in os.environ:
-        # Running on HF Spaces - use minimal configuration
-        demo.launch(
-            server_port=7860,  # Match the port HF Spaces expects
-            share=False,       # Explicitly disable sharing
-            debug=False,       # Disable debug mode in production
-            show_error=True    # Show detailed errors if they occur
-        )
+        cuda_working = is_cuda_working()
+        if cuda_working:
+            print("GPU is available and working. Using CUDA.")
+            # You might want to set some environment variables or configurations here
+            os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "max_split_size_mb:128"
+        else:
+            print("CUDA not working properly. Forcing CPU mode.")
+            os.environ["CUDA_VISIBLE_DEVICES"] = ""
+            torch.backends.cudnn.enabled = False
+
+        # Launch with minimal parameters on Spaces
+        demo.launch()
     else:
         # Running locally - use custom server settings and share
+        print(f"Running locally with device: {'cuda' if torch.cuda.is_available() else 'cpu'}")
         demo.launch(
             server_name="0.0.0.0",
             server_port=7860,
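For context on the CPU-fallback branch in this diff: clearing `CUDA_VISIBLE_DEVICES` hides all GPUs from PyTorch, so a later `torch.cuda.is_available()` check reports False and tensors land on the CPU. A minimal standalone sketch of that behavior (not part of app.py; the `device` and `x` names are illustrative), which is most reliable when the variable is set before PyTorch touches CUDA:

```python
import os

# Hide all GPUs; setting this before PyTorch initializes CUDA is the
# reliable way to force CPU-only execution.
os.environ["CUDA_VISIBLE_DEVICES"] = ""

import torch

# With no visible devices, is_available() returns False, so we pick "cpu".
device = "cuda" if torch.cuda.is_available() else "cpu"
x = torch.tensor([1.0, 2.0], device=device)
print(device, x.device)  # -> cpu cpu
```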