Spanicin committed on
Commit e25b41e
1 Parent(s): 77c2d82

Upload 2 files

Files changed (2)
  1. app.py +108 -0
  2. requirements.txt +6 -0
app.py ADDED
@@ -0,0 +1,108 @@
+ import torch
+ from diffusers import AnimateDiffPipeline, LCMScheduler, MotionAdapter
+ from diffusers.utils import export_to_video
+ from flask import Flask, request, jsonify
+ from flask_cors import CORS
+ import base64
+ import tempfile
+ import os
+ import threading
+
+ app = Flask(__name__)
+ CORS(app)
+
+ pipe = None
+ app.config['temp_response'] = None
+ app.config['generation_thread'] = None
+
+ def download_pipeline():
+     global pipe
+     try:
+         print('Downloading the model weights')
+         # Download and initialize the AnimateLCM animation pipeline
+         adapter = MotionAdapter.from_pretrained("wangfuyun/AnimateLCM", torch_dtype=torch.float16)
+         pipe = AnimateDiffPipeline.from_pretrained("emilianJR/epiCRealism", motion_adapter=adapter, torch_dtype=torch.float16)
+         pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config, beta_schedule="linear")
+         pipe.load_lora_weights("wangfuyun/AnimateLCM", weight_name="AnimateLCM_sd15_t2v_lora.safetensors", adapter_name="lcm-lora")
+         pipe.set_adapters(["lcm-lora"], [0.8])
+         pipe.enable_vae_slicing()
+         pipe.enable_model_cpu_offload()
+         return True
+     except Exception as e:
+         print(f"Error downloading pipeline: {e}")
+         return False
+
+
+ def generate_and_export_animation(prompt):
+     global pipe
+
+     # Ensure the animation pipeline is initialized
+     if pipe is None:
+         if not download_pipeline():
+             return {'video_base64': '', 'status': 'failed', 'message': 'Failed to initialize animation pipeline'}
+
+     try:
+         # Generate animation frames
+         print('Generating video frames')
+         output = pipe(
+             prompt=prompt,
+             negative_prompt="bad quality, worse quality, low resolution, blur",
+             num_frames=16,
+             guidance_scale=2.0,
+             num_inference_steps=6
+         )
+
+         # Export frames to a temporary video file
+         with tempfile.NamedTemporaryFile(suffix='.mp4', delete=False) as temp_file:
+             temp_video_path = temp_file.name
+         print('temp_video_path', temp_video_path)
+         export_to_video(output.frames[0], temp_video_path)
+
+         # Read the video back and return it base64-encoded so it can travel as JSON
+         with open(temp_video_path, 'rb') as video_file:
+             video_binary = video_file.read()
+
+         video_base64 = base64.b64encode(video_binary).decode('utf-8')
+         os.remove(temp_video_path)
+         return {'video_base64': video_base64, 'status': None}
+
+     except Exception as e:
+         print(f"Error generating animation: {e}")
+         return {'video_base64': '', 'status': 'failed', 'message': 'Failed to generate animation'}
+
+ def background(prompt):
+     with app.app_context():
+         # generate_and_export_animation returns a plain dict, so store it directly
+         temp_response = generate_and_export_animation(prompt)
+         app.config['temp_response'] = temp_response
+
+ @app.route('/run', methods=['POST'])
+ def handle_animation_request():
+     prompt = request.form.get('prompt')
+     if prompt:
+         # Run generation in a background thread so the request can return immediately
+         generation_thread = threading.Thread(target=background, args=(prompt,))
+         app.config['generation_thread'] = generation_thread
+         generation_thread.start()
+         response_data = {"message": "Video generation started", "process_id": generation_thread.ident}
+         return jsonify(response_data)
+     else:
+         return jsonify({"message": "Please provide a valid text prompt."}), 400
+
+ @app.route('/status', methods=['GET'])
+ def check_animation_status():
+     process_id = request.args.get('process_id', None)
+
+     if process_id:
+         generation_thread = app.config.get('generation_thread')
+         if generation_thread and generation_thread.is_alive():
+             return jsonify({"status": "in_progress"}), 200
+         elif app.config.get('temp_response'):
+             final_response = app.config['temp_response']
+             # Mark the result as completed unless the background job recorded a failure
+             if final_response.get('status') != 'failed':
+                 final_response['status'] = 'completed'
+             return jsonify(final_response)
+         return jsonify({"status": "unknown"}), 404
+     return jsonify({"message": "Please provide a valid process_id."}), 400
+
+ if __name__ == '__main__':
+     app.run(debug=True)
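
For reference, a minimal client sketch for the two routes above, in Python. It assumes the app is reachable at the Flask development default http://127.0.0.1:5000 and that the requests package is installed; the prompt text and output filename are placeholders.

import base64
import time
import requests

BASE_URL = "http://127.0.0.1:5000"  # assumed address of the Flask dev server

# Start a generation job; /run expects the prompt as form data
run_resp = requests.post(f"{BASE_URL}/run", data={"prompt": "a rocket launching into space"})
process_id = run_resp.json()["process_id"]

# Poll /status until the background thread reports a result
while True:
    body = requests.get(f"{BASE_URL}/status", params={"process_id": process_id}).json()
    if body.get("status") == "completed":
        # The video arrives as base64-encoded MP4 bytes
        with open("animation.mp4", "wb") as f:
            f.write(base64.b64decode(body["video_base64"]))
        break
    if body.get("status") == "failed":
        print("Generation failed:", body.get("message"))
        break
    time.sleep(5)
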
requirements.txt ADDED
@@ -0,0 +1,6 @@
+ flask
+ flask_cors
+ diffusers
+ peft
+ torch
+ gunicorn
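
Note: depending on the diffusers version, transformers and accelerate may also need to be installed, since the pipeline's text encoder is loaded through transformers and enable_model_cpu_offload() relies on accelerate. With gunicorn listed, the app is presumably meant to be served with something like gunicorn app:app; because the pipeline and the generation state live in a module-level variable and app.config, a single worker process is the safe choice.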