import psutil
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Snapshot of system memory (informational; the value is not used below).
mem = psutil.virtual_memory()

tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model = AutoModelForSeq2SeqLM.from_pretrained(
    "THUDM/chatglm-6b", trust_remote_code=True
).bfloat16()
model = model.eval()  # inference only: disable dropout


def chat(query, history=None):
    # Avoid a mutable default argument; Gradio passes None as the initial state.
    history = history or []
    _, history = model.chat(tokenizer, query, history, max_length=512)
    # Return the updated history twice: once for the chatbot display, once for the state.
    return history, history


description = (
    "ChatGLM-6B is an open-source conversational language model that supports "
    "bilingual (Chinese-English) question answering. It is based on the General "
    "Language Model (GLM) architecture and has 6.2 billion parameters.\n"
)
title = "ChatGLM-6B Chatbot"
examples = [["Hello"], ["你好"], ["介绍清华"]]

chatbot_interface = gr.Interface(
    fn=chat,
    title=title,
    description=description,
    examples=examples,
    inputs=["text", "state"],
    outputs=["chatbot", "state"],
)

if __name__ == "__main__":
    chatbot_interface.launch()
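
# A minimal sketch of calling chat() directly, without the Gradio UI (assumes the
# model above has loaded; the prompts and variable names here are arbitrary).
# The returned state is passed back in to continue the same conversation:
#
#     history, state = chat("Hello")
#     history, state = chat("你好", state)
#     print(history[-1][1])  # latest response from the model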