Upload folder using huggingface_hub
- web_demo.py +1 -0
- web_demo2.py +1 -0
- web_demo_old.py +2 -0
- web_demo_vision.py +1 -0
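The commit message indicates the folder was pushed with the huggingface_hub client. A minimal sketch of how such an upload is typically done with HfApi.upload_folder; the repo_id below is a placeholder, since the target repo is not shown in this commit view:

from huggingface_hub import HfApi

api = HfApi()
# Upload the current working directory to a model repo on the Hub.
# "your-username/your-repo" is a placeholder, not taken from this commit.
api.upload_folder(
    folder_path=".",
    repo_id="your-username/your-repo",
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)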
web_demo.py CHANGED
@@ -1,4 +1,5 @@
 from transformers import AutoModel, AutoTokenizer,AutoModelForCausalLM
+import torch
 
 import gradio as gr
 import mdtex2html
web_demo2.py CHANGED
@@ -1,6 +1,7 @@
 from transformers import AutoModel, AutoTokenizer,AutoModelForCausalLM
 import streamlit as st
 from streamlit_chat import message
+import torch
 
 
 st.set_page_config(
web_demo_old.py CHANGED
@@ -1,5 +1,7 @@
 from transformers import AutoModel, AutoTokenizer,AutoModelForCausalLM
 import gradio as gr
+import torch
+
 
 # tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
 # model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).half().cuda()
web_demo_vision.py CHANGED
@@ -1,6 +1,7 @@
 from transformers import AutoModel, AutoTokenizer,AutoModelForCausalLM
 import gradio as gr
 import mdtex2html
+import torch
 
 # tokenizer = AutoTokenizer.from_pretrained("THUDM/visualglm-6b", trust_remote_code=True)
 # model = AutoModel.from_pretrained("THUDM/visualglm-6b", trust_remote_code=True).half().cuda()
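Each diff above only adds import torch; the commented-out loader lines hint at how it is used. A minimal sketch of the assumed usage, with model loading following those commented lines and a CPU fallback that is an assumption, not part of this commit:

import torch
from transformers import AutoModel, AutoTokenizer

# Load ChatGLM as in the commented lines above.
tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)

# Assumed reason for the new torch import: device/dtype selection.
# Half precision on GPU (as in the commented .half().cuda() calls),
# full precision on CPU as a fallback.
if torch.cuda.is_available():
    model = model.half().cuda()
else:
    model = model.float()
model = model.eval()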