# Streamlit chatbot app serving the GGUF model "Divyansh12/check" via llama-cpp-python.
import streamlit as st
from llama_cpp import Llama
# Initialize the model.
# Streamlit re-executes this entire script on every user interaction, so
# without caching the (multi-gigabyte) GGUF model would be re-downloaded /
# re-loaded on every button click. st.cache_resource runs the loader once
# per server process and reuses the same Llama instance across reruns.
@st.cache_resource
def _load_model() -> Llama:
    """Download (if needed) and initialize the chat model once per process."""
    return Llama.from_pretrained(
        repo_id="Divyansh12/check",
        filename="unsloth.F16.gguf",  # Ensure this matches your model file name
        verbose=True,
        n_ctx=32768,          # context window size, in tokens
        n_threads=2,          # CPU threads used for inference
        chat_format="chatml",
    )

llm = _load_model()
# Define the function to get responses from the model
def respond(message, history, *, model=None):
    """Generate the assistant's reply to *message* given the chat *history*.

    Args:
        message: The new user message to answer.
        history: List of ``(user_message, assistant_message)`` pairs from
            earlier turns; either element may be falsy and is then skipped.
        model: Optional chat-completion backend; defaults to the module-level
            ``llm``. Keyword-only, so existing two-argument callers are
            unaffected, while tests can inject a stub.

    Returns:
        The full assistant reply as a single string.
    """
    if model is None:
        model = llm

    # Rebuild the ChatML-style message list from the stored history.
    messages = []
    for user_message, assistant_message in history:
        if user_message:
            messages.append({"role": "user", "content": user_message})
        if assistant_message:
            messages.append({"role": "assistant", "content": assistant_message})
    messages.append({"role": "user", "content": message})

    # Stream the response from the model
    response_stream = model.create_chat_completion(
        messages=messages,
        stream=True,
        max_tokens=512,   # Use a default value for simplicity
        temperature=0.7,  # Use a default value for simplicity
        top_p=0.95,       # Use a default value for simplicity
    )

    # Collect the response chunks. Streamed deltas may be empty or carry
    # only a "role" key; only actual text content is accumulated.
    response = ""
    for chunk in response_stream:
        content = chunk["choices"][0]["delta"].get("content")
        if content:
            response += content
    return response  # Return the full response
# Streamlit UI
st.title("Simple Chatbot")
st.write("### Interact with the chatbot!")

# User input field
user_message = st.text_area("Your Message:", "")

# Chat history lives in session state so it survives Streamlit's reruns.
if 'history' not in st.session_state:
    st.session_state.history = []

# Button to send the message
if st.button("Send"):
    if user_message:  # Ignore clicks while the input box is empty
        # Query the model and record the exchange in the session history.
        response = respond(user_message, st.session_state.history)
        st.session_state.history.append((user_message, response))
        # NOTE(review): the original code reassigned ``user_message = ""``
        # here to "clear" the input, but rebinding a local never resets a
        # Streamlit widget, so the dead assignment has been removed.
        # Actually clearing the text_area would require giving it a
        # ``key`` and blanking ``st.session_state[key]`` in an
        # ``on_click`` callback.

# Display the chat history
st.write("### Chat History")
for user_msg, assistant_msg in st.session_state.history:
    st.write(f"**User:** {user_msg}")
    st.write(f"**Assistant:** {assistant_msg}")