Spaces: Running on CPU Upgrade
olmo
app_allenai.py +10 -1
app_allenai.py CHANGED
@@ -2,11 +2,20 @@ import gradio as gr
 import spaces
 import transformers_gradio
 
+# Load Llama model
 demo = gr.load(name="allenai/Llama-3.1-Tulu-3-8B", src=transformers_gradio.registry)
 demo.fn = spaces.GPU()(demo.fn)
 
+# Load OLMo model
+olmo_demo = gr.load(name="akhaliq/olmo-anychat")
+
+
+# Disable API names for both demos
 for fn in demo.fns.values():
     fn.api_name = False
+for fn in olmo_demo.fns.values():
+    fn.api_name = False
 
 if __name__ == "__main__":
-    demo.launch()
+    # Launch both demos
+    gr.TabbedInterface([demo, olmo_demo], ["Llama", "OLMo"]).launch()
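Assembled from the hunk above, the resulting app_allenai.py should read roughly as follows (a reconstruction from the diff, not the verbatim file contents):

# app_allenai.py: full file after this commit, reconstructed from the diff above
import gradio as gr
import spaces
import transformers_gradio

# Load Llama model
demo = gr.load(name="allenai/Llama-3.1-Tulu-3-8B", src=transformers_gradio.registry)
demo.fn = spaces.GPU()(demo.fn)  # wrap the inference function with the spaces GPU decorator

# Load OLMo model
olmo_demo = gr.load(name="akhaliq/olmo-anychat")


# Disable API names for both demos
for fn in demo.fns.values():
    fn.api_name = False
for fn in olmo_demo.fns.values():
    fn.api_name = False

if __name__ == "__main__":
    # Launch both demos
    gr.TabbedInterface([demo, olmo_demo], ["Llama", "OLMo"]).launch()

gr.TabbedInterface renders each loaded demo in its own tab, so a single launch() call now serves both the Llama and OLMo chat UIs.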