from ctransformers import AutoModelForCausalLM
import gradio as gr

greeting = """
Follow [Gathnex](https://medium.com/@gathnex) for more updates on Generative AI and LLMs. Follow us on [LinkedIn](https://www.linkedin.com/company/gathnex/) and [GitHub](https://github.com/gathnexadmin). A special thanks to the Gathnex team members who made a significant contribution to this project.
"""

# Load the quantized Zephyr-7B GGUF model on CPU via ctransformers.
llm = AutoModelForCausalLM.from_pretrained(
    "zephyr-7b-beta.Q4_K_S.gguf",
    model_type="mistral",
    max_new_tokens=1096,
    threads=3,
)


def stream(user_prompt, history):
    # Build a Zephyr-style chat prompt: system instruction, then the user turn, then the assistant turn.
    system_prompt = "Below is an instruction that describes a task. Write a response that appropriately completes the request."
    E_INST = ""
    user, assistant = "<|user|>", "<|assistant|>"
    prompt = f"{system_prompt}{E_INST}\n{user}\n{user_prompt.strip()}{E_INST}\n{assistant}\n"
    # Accumulate streamed tokens and yield the growing reply so the chat UI updates live.
    output = ""
    for text in llm(prompt, stream=True, threads=3):
        output += text
        yield output


css = """
h1 {
  text-align: center;
}
#duplicate-button {
  margin: auto;
  color: white;
  background: #1565c0;
  border-radius: 100vh;
}
.contain {
  max-width: 900px;
  margin: auto;
  padding-top: 1.5rem;
}
"""

chat_interface = gr.ChatInterface(
    fn=stream,
    additional_inputs_accordion_name="Credentials",
    # additional_inputs=[
    #     gr.Textbox(label="OpenAI Key", lines=1),
    #     gr.Textbox(label="Linkedin Access Token", lines=1),
    # ],
    stop_btn=None,
    examples=[
        ["explain Large language model"],
        ["what is quantum computing"],
    ],
)

with gr.Blocks(css=css) as demo:
    gr.HTML("<h1>Gathnex Free LLM Deployment Space</h1>")
") gr.HTML("

Gathnex AI💬

") gr.DuplicateButton(value="Duplicate Space for private use", elem_id="duplicate-button") chat_interface.render() gr.Markdown(greety) if __name__ == "__main__": demo.queue(max_size=10).launch()