import os
import streamlit as st
from langchain_huggingface import HuggingFaceEndpoint
# Export the API token as HUGGINGFACEHUB_API_TOKEN so langchain_huggingface can pick it up
sec_key = "YOUR_HUGGING_FACE_API_TOKEN_HERE"
os.environ["HUGGINGFACEHUB_API_TOKEN"] = sec_key
# Specify the repository ID of the Hugging Face model you want to use
repo_id_mistral = "mistralai/Mistral-7B-Instruct-v0.3"
# Streamlit app layout
st.title("🤖 Trying Out the Mistral-7B-Instruct-v0.3 Model 🧠")
# Input text area for the user query, with enhanced instructions
user_query = st.text_area(
    "✨ Enter your magical query:",
    height=100,
    help="""
    **Enhanced Prompting Instructions:**
    - Be clear and specific about what you want to know.
    - Use natural language to describe your query.
    - If asking a question, ensure it is well-formed and unambiguous.
    - For best results, provide context or background information if relevant.
    """
)
# Slider for adjusting the temperature
temperature = st.slider(
    "Temperature",
    min_value=0.1,
    max_value=1.0,
    value=0.7,
    step=0.1,
    help="""
    **Temperature:**
    - Lower values (e.g., 0.1) make the output more deterministic and focused.
    - Higher values (e.g., 1.0) make the output more diverse and creative.
    """
)
# Slider for adjusting the maximum length of the generated response
max_length = st.slider(
    "Max Length",
    min_value=32,
    max_value=256,
    value=128,
    step=32,
    help="""
    **Max Length:**
    - Controls the maximum number of tokens in the generated response.
    - Adjust based on the desired length of the response.
    """
)
# Button to trigger the query
if st.button("🪄 Cast Spell"):
    if user_query:
        # Initialize the HuggingFaceEndpoint client for Mistral
        llm_mistral = HuggingFaceEndpoint(
            repo_id=repo_id_mistral,
            max_new_tokens=max_length,
            temperature=temperature,
            huggingfacehub_api_token=sec_key
        )
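        # Note: re-creating the endpoint on every button click works, but it is not
        # strictly necessary. A hedged alternative (a sketch, not part of the original
        # app; the get_llm helper name is illustrative) would cache the client with
        # Streamlit's st.cache_resource:
        #
        #     @st.cache_resource
        #     def get_llm(temperature: float, max_new_tokens: int):
        #         return HuggingFaceEndpoint(
        #             repo_id=repo_id_mistral,
        #             max_new_tokens=max_new_tokens,
        #             temperature=temperature,
        #             huggingfacehub_api_token=sec_key,
        #         )
        #
        #     llm_mistral = get_llm(temperature, max_length)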
        # Invoke the model with the user's query
        response_mistral = llm_mistral.invoke(user_query)
        # Display the response
        st.markdown("🔮 <span class='response'>Response from Mistral-7B-Instruct-v0.3:</span>", unsafe_allow_html=True)
        st.markdown(f"<span class='response'>{response_mistral}</span>", unsafe_allow_html=True)
        # Save the query and response to session state
        if 'history' not in st.session_state:
            st.session_state.history = []
        st.session_state.history.append((user_query, response_mistral))
    else:
        st.write("🚨 Please enter a query to cast your spell.")
# Button to clear the history
if st.button("🗑️ Clear History"):
    if 'history' in st.session_state:
        st.session_state.history = []
    st.success("History cleared!")
# Display the history of queries and responses
if 'history' in st.session_state:
    st.subheader("📜 Scroll of Spells Cast")
    for query, response_mistral in st.session_state.history:
        st.write(f"**Query:** {query}")
        st.markdown(f"<span class='response'>**Response from Mistral-7B-Instruct-v0.3:** {response_mistral}</span>", unsafe_allow_html=True)
        st.write("---")
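# Note: the 'response' CSS class used in the st.markdown calls above is not defined
# anywhere in this listing, so those spans render with default styling. A minimal,
# assumed sketch of the style injection the app presumably pairs with it (the class
# name comes from the code above; the colour and weight are placeholders) would be,
# near the top of the script:
#
#     st.markdown(
#         """
#         <style>
#         .response { color: #4b8bbe; font-weight: 600; }
#         </style>
#         """,
#         unsafe_allow_html=True,
#     )
#
# The app can then be launched locally with `streamlit run app.py`.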