Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
Working version: add Flux image-generation tab
Browse files- app.py +3 -0
- app_flux.py +17 -0
- requirements.txt +1 -0
app.py
CHANGED
@@ -14,6 +14,7 @@ from app_qwen import demo as demo_qwen
|
|
14 |
from app_sambanova import demo as demo_sambanova
|
15 |
from app_together import demo as demo_together
|
16 |
from app_xai import demo as demo_grok
|
|
|
17 |
|
18 |
with gr.Blocks(fill_height=True) as demo:
|
19 |
with gr.Tab("Meta Llama"):
|
@@ -47,6 +48,8 @@ with gr.Blocks(fill_height=True) as demo:
|
|
47 |
demo_together.render()
|
48 |
with gr.Tab("NVIDIA"):
|
49 |
demo_nvidia.render()
|
|
|
|
|
50 |
|
51 |
if __name__ == "__main__":
|
52 |
demo.launch(ssr_mode=False)
|
|
|
14 |
from app_sambanova import demo as demo_sambanova
|
15 |
from app_together import demo as demo_together
|
16 |
from app_xai import demo as demo_grok
|
17 |
+
from app_flux import demo as demo_flux
|
18 |
|
19 |
with gr.Blocks(fill_height=True) as demo:
|
20 |
with gr.Tab("Meta Llama"):
|
|
|
48 |
demo_together.render()
|
49 |
with gr.Tab("NVIDIA"):
|
50 |
demo_nvidia.render()
|
51 |
+
with gr.Tab("Flux"):
|
52 |
+
demo_flux.render()
|
53 |
|
54 |
if __name__ == "__main__":
|
55 |
demo.launch(ssr_mode=False)
|
app_flux.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Gradio demo for Black Forest Labs Flux image models, served via Replicate.

Builds the shared model-picker app with ``utils.get_app`` and exposes it as
``demo`` so the top-level ``app.py`` can render it inside a tab.
"""

import replicate_gradio

from utils import get_app

# NOTE(review): get_app presumably wires each model id to the Replicate
# registry loader and returns a gr.Blocks — confirm against utils.get_app.
demo = get_app(
    models=[
        "black-forest-labs/flux-depth-pro",
        "black-forest-labs/flux-canny-pro",
        "black-forest-labs/flux-fill-pro",
        "black-forest-labs/flux-depth-dev",
    ],
    default_model="black-forest-labs/flux-depth-pro",
    src=replicate_gradio.registry,
)

if __name__ == "__main__":
    demo.launch()
|
requirements.txt
CHANGED
@@ -321,3 +321,4 @@ websockets==12.0
|
|
321 |
# via gradio-client
|
322 |
xai-gradio==0.0.2
|
323 |
# via anychat (pyproject.toml)
|
|
|
|
321 |
# via gradio-client
|
322 |
xai-gradio==0.0.2
|
323 |
# via anychat (pyproject.toml)
|
324 |
+
replicate-gradio @ git+https://github.com/AK391/replicate-gradio.git
|