Upload folder using huggingface_hub
Files changed:
- web_demo.py +2 -1
- web_demo2.py +1 -1
- web_demo_old.py +1 -1
- web_demo_vision.py +1 -1
web_demo.py
CHANGED
@@ -1,4 +1,5 @@
-from transformers import AutoModel, AutoTokenizer
+from transformers import AutoModel, AutoTokenizer,AutoModelForCausalLM
+
 import gradio as gr
 import mdtex2html
 
web_demo2.py
CHANGED
@@ -1,4 +1,4 @@
-from transformers import AutoModel, AutoTokenizer
+from transformers import AutoModel, AutoTokenizer,AutoModelForCausalLM
 import streamlit as st
 from streamlit_chat import message
 
web_demo_old.py
CHANGED
@@ -1,4 +1,4 @@
-from transformers import AutoModel, AutoTokenizer
+from transformers import AutoModel, AutoTokenizer,AutoModelForCausalLM
 import gradio as gr
 
 # tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
web_demo_vision.py
CHANGED
@@ -1,4 +1,4 @@
-from transformers import AutoModel, AutoTokenizer
+from transformers import AutoModel, AutoTokenizer,AutoModelForCausalLM
 import gradio as gr
 import mdtex2html
 
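All four demos make the same change: AutoModelForCausalLM is added to the transformers import alongside AutoModel and AutoTokenizer. The commit only touches the import lines, so how the new class is used further down in each file is not shown. Below is a minimal sketch of the usual loading pattern, assuming the ChatGLM checkpoint referenced in the commented-out line of web_demo_old.py; it is an illustration, not code from this commit.

from transformers import AutoTokenizer, AutoModelForCausalLM

# Model id taken from the commented-out line in web_demo_old.py; whether these
# demos really route loading through AutoModelForCausalLM is an assumption.
tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
# Half precision on GPU, as the ChatGLM web demos commonly do; drop for CPU-only setups.
model = model.half().cuda().eval()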