Update app.py
app.py CHANGED
@@ -5,7 +5,7 @@ from llama_mesh import LLaMAMesh
 from mast3r import MASt3R
 from utils import apply_gradient_color
 from utils import create_image_grid
-
+import os
 import torch
 
 DESCRIPTION = '''
@@ -16,10 +16,11 @@ DESCRIPTION = '''
 '''
 
 DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
+HF_TOKEN = os.environ.get("HF_TOKEN", None)
 
 # Initialize models
 mv_diff = MultiViewDiffusion(device=DEVICE)
-vllm = VisionLLM(device=DEVICE)
+vllm = VisionLLM(device=DEVICE, use_auth_token=HF_TOKEN)
 llama_mesh = LLaMAMesh(device=DEVICE)
 mast3r = MASt3R(device=DEVICE)
 
@@ -95,6 +96,4 @@
 fn=apply_gradient_color,
 inputs=[future_mesh_input],
 outputs=[future_mesh_output_2]
-)
-
-demo.launch()
+)
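In short, the commit imports os, reads an optional Hugging Face access token from the HF_TOKEN environment variable, and passes it to the VisionLLM wrapper via use_auth_token so that gated weights can be downloaded. A minimal sketch of that token-handling pattern is below; load_vision_llm is a hypothetical stand-in (not part of this repo), and only the os.environ.get fallback and the use_auth_token keyword come from the diff itself.

import os

# Read the token the same way the commit does: fall back to None when
# HF_TOKEN is not set, so the app can still start with public weights only.
HF_TOKEN = os.environ.get("HF_TOKEN", None)


def load_vision_llm(device: str, use_auth_token: str | None = None):
    """Hypothetical stand-in for VisionLLM(device=..., use_auth_token=...).

    A real wrapper would forward use_auth_token to the Hub download call
    for any weights that require authentication.
    """
    mode = "authenticated" if use_auth_token else "anonymous"
    print(f"Loading vision LLM on {device} ({mode} download).")


if __name__ == "__main__":
    load_vision_llm("cpu", use_auth_token=HF_TOKEN)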