import os

import gradio as gr
import torch

# Select GPU if available, otherwise fall back to CPU
device = "cuda" if torch.cuda.is_available() else "cpu"
print(f"Using device: {device}")


def load_model():
    """Load the hosted model as a Gradio interface, or return None on failure."""
    try:
        # gr.Interface.load with api_key/alias is the Gradio 3.x loader; on newer
        # Gradio releases the equivalent call is gr.load(name, hf_token=...).
        interface = gr.Interface.load(
            "models/goofyai/3d_render_style_xl",
            api_key=os.getenv("HUGGINGFACE_TOKEN"),  # API token from environment variable
            alias="game-icon-generator",
        )
        return interface
    except Exception as e:
        print(f"Error loading model: {e}")
        return None


# Create and launch the interface
try:
    interface = load_model()
    if interface:
        interface.launch(
            share=False,
            server_name="0.0.0.0",
            server_port=7860,
            show_error=True,
        )
    else:
        print("Failed to load the interface")
except Exception as e:
    print(f"Error launching interface: {e}")
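# Usage sketch (assumptions: the script is saved as app.py and you have a valid
# Hugging Face access token; both names are illustrative, not from the original):
#
#   export HUGGINGFACE_TOKEN=hf_xxxxxxxxxxxx
#   python app.py
#
# Gradio then serves the loaded model's UI on port 7860 of all network interfaces.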