Madhuri123 committed on
Commit ba9839b • 1 Parent(s): d061a4c
Update app.py
app.py CHANGED
@@ -3,7 +3,8 @@ import requests
 import torch
 from PIL import Image
 from transformers import MllamaForConditionalGeneration, AutoProcessor
-
+from huggingface_hub import login
+login()
 HF_TOKEN=st.secrets["newfinegrained"]
 
 def load_model_and_processor(model_id):
@@ -11,8 +12,7 @@ def load_model_and_processor(model_id):
     model = MllamaForConditionalGeneration.from_pretrained(
         model_id,
         torch_dtype=torch.bfloat16,
-        device_map="auto"
-        use_auth_token=HF_TOKEN
+        device_map="auto"
     )
     processor = AutoProcessor.from_pretrained(model_id)
     return model, processor
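After this commit the app calls login() with no arguments, which typically triggers an interactive token prompt and may not complete in a headless Space. Below is a minimal sketch of the same loader with the secret passed explicitly instead; it assumes the newfinegrained Streamlit secret from the diff, the streamlit import that is not shown in the hunk, and the token= keyword that replaces the deprecated use_auth_token= removed here. It is an illustration, not the author's code.

# Sketch only: names outside the diff (streamlit import, explicit token passing) are assumptions.
import streamlit as st
import torch
from huggingface_hub import login
from transformers import MllamaForConditionalGeneration, AutoProcessor

HF_TOKEN = st.secrets["newfinegrained"]
login(token=HF_TOKEN)  # authenticate non-interactively with the stored secret

def load_model_and_processor(model_id):
    # token= is the current replacement for the deprecated use_auth_token= argument
    model = MllamaForConditionalGeneration.from_pretrained(
        model_id,
        torch_dtype=torch.bfloat16,
        device_map="auto",
        token=HF_TOKEN,
    )
    processor = AutoProcessor.from_pretrained(model_id, token=HF_TOKEN)
    return model, processor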