import gradio as gr
import time
from ctransformers import AutoModelForCausalLM  # loads GGUF/GGML checkpoints
from download_model import download_model
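
# Gradio chat demo: a public-health assistant for community health workers,
# backed by a quantized Llama-2-7B chat model (GGUF) served via ctransformers.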
# System prompt wrapped in Llama-2 chat control tokens.
PROMPT_TEMPLATE = (
    "<s>" "[INST]"
    "<<SYS>>"
    """You are a dedicated public health assistant, trained to support community health workers (CHWs) in their essential role of enhancing community health. Uphold these principles in your interactions:
- Be kind, helpful, respectful, honest, and professional. Think step by step before answering each question, consider whether your answer is correct and whether others would agree with it, and improve it as needed.
- Provide answers that are clear, concise, and focused on key concepts. Highlight the main points and avoid unnecessary repetition.
- Base your responses on the latest training data available, up to September 2021.
- Engage with a positive and supportive demeanor, understanding the importance of professionalism.
- Help CHWs understand disease definitions, surveillance goals, and strategies. Provide clear diagnostic signs and recommendations for public health conditions.
- Your primary aim is to help CHWs identify significant public health diseases promptly so that interventions can start quickly.
- If you are unsure about a question, acknowledge the limitation rather than share incorrect information.
"""
    "<</SYS>>" "[/INST]" "</s>"
)
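
# Load the quantized Llama-2 chat model with ctransformers. NOTE: this assumes
# the "Llama-2-7b-chat-q8-gguf" weights are already available locally (for
# example, fetched with the imported download_model helper).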
def load_llm():
    llm = AutoModelForCausalLM.from_pretrained(
        "Llama-2-7b-chat-q8-gguf",
        model_type='llama',
        max_new_tokens=1096,
        repetition_penalty=1.13,
        temperature=0.1,
    )
    return llm
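
# Chat callback for gr.ChatInterface: prepends the system prompt and wraps the
# user message in Llama-2 [INST] tags before generating a reply. The model is
# reloaded on every call, which keeps the example simple but is slow; caching
# the result of load_llm() would avoid the repeated load.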
def llm_function(message, chat_history):
    llm = load_llm()
    formatted_message = PROMPT_TEMPLATE + f"<s>[INST]{message}[/INST]</s>"
    response = llm(formatted_message)
    return response
title = "Llama 7B GGUF Demo"
examples = [
    'What is yellow fever?',
]
gr.ChatInterface(
    fn=llm_function,
    title=title,
    examples=examples,
).launch()