import gradio as gr
import numpy as np
import random
import multiprocessing
import subprocess
import sys
import time
import signal
import json
import os
import requests
from loguru import logger
from decouple import config
from pathlib import Path
from PIL import Image
import io
URL = config('URL')
OUTPUT_DIR = config('OUTPUT_DIR')
INPUT_DIR = config('INPUT_DIR')
COMF_PATH = config('COMF_PATH')
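
# Illustrative .env for python-decouple (values here are assumptions, not from the
# original Space): URL should point at the ComfyUI prompt endpoint and COMF_PATH at
# its main.py entry script.
#   URL=http://127.0.0.1:8188/prompt
#   OUTPUT_DIR=./ComfyUI/output
#   INPUT_DIR=./ComfyUI/input
#   COMF_PATH=./ComfyUI/main.py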
import torch
#import spaces

print(f"Is CUDA available: {torch.cuda.is_available()}")
print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
print(torch.version.cuda)
device = torch.cuda.get_device_name(torch.cuda.current_device())
print(device)
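
# Return the most recently modified image in a folder, or None if it contains no
# images. Used to detect when ComfyUI has written a new result to OUTPUT_DIR.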
def get_latest_image(folder):
    files = os.listdir(folder)
    image_files = [f for f in files if f.lower().endswith(('.png', '.jpg', '.jpeg'))]
    image_files.sort(key=lambda x: os.path.getmtime(os.path.join(folder, x)))
    latest_image = os.path.join(folder, image_files[-1]) if image_files else None
    return latest_image
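
# Submit a workflow to ComfyUI: the API expects a JSON body of the form
# {"prompt": <workflow>} POSTed to its /prompt endpoint (URL is assumed to point there).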
def start_queue(prompt_workflow):
    p = {"prompt": prompt_workflow}
    data = json.dumps(p).encode('utf-8')
    requests.post(URL, data=data)
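
# Poll the local ComfyUI history endpoint; a 200 response means the server is up
# and accepting API requests.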
def check_server_ready():
    try:
        response = requests.get("http://127.0.0.1:8188/history/123", timeout=5)
        return response.status_code == 200
    except requests.RequestException:
        return False
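
# Gradio callback: save the uploaded image, start ComfyUI as a subprocess, submit the
# text input as the workflow payload, then poll OUTPUT_DIR until a new image appears
# or the timeout is hit. On ZeroGPU Spaces this would normally carry the @spaces.GPU
# decorator (commented out below).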
#@spaces.GPU
def generate_image(prompt, image):
    previous_image = get_latest_image(OUTPUT_DIR)

    # Save the uploaded frame where the workflow can read it (INPUT_DIR, not
    # OUTPUT_DIR, so it is not mistaken for a freshly generated result below).
    image = Image.fromarray(image)
    image.save(os.path.join(INPUT_DIR, 'input.png'), format='PNG')

    # Start the ComfyUI script as a subprocess
    process = subprocess.Popen([sys.executable, COMF_PATH, "--listen", "127.0.0.1"])
    logger.debug(f'Subprocess started with PID: {process.pid}')
    try:
        # Wait for the server to start (at most 20 seconds)
        for _ in range(20):
            if check_server_ready():
                break
            time.sleep(1)
        else:
            raise TimeoutError("Server did not start in time")
        start_queue(prompt)

        # Wait for a new image to appear in the output folder
        timeout = 400  # maximum wait time in seconds
        start_time = time.time()
        while time.time() - start_time < timeout:
            latest_image = get_latest_image(OUTPUT_DIR)
            if latest_image != previous_image:
                return latest_image
            time.sleep(1)

        raise TimeoutError("New image was not generated in time")
    except Exception as e:
        logger.error(f"Error in generate_image: {e}")
        raise
    finally:
        # Shut down the ComfyUI subprocess
        if process.poll() is None:
            process.terminate()
            try:
                process.wait(timeout=5)
            except subprocess.TimeoutExpired:
                process.kill()

if __name__ == "__main__":
    demo = gr.Interface(fn=generate_image, inputs=["text", "image"], outputs=["image"])
    demo.launch(debug=True)
    logger.debug('demo.launch()')
    logger.info("Main script has finished.")