YaserDS-777 committed on
Commit 0429aca • 1 Parent(s): 58420aa

Create app.py

Files changed (1)
  1. app.py +133 -0
app.py ADDED
@@ -0,0 +1,133 @@
+ import os
+ import streamlit as st
+ from langchain_huggingface import HuggingFaceEndpoint
+
+ # Set the environment variable "HUGGINGFACEHUB_API_TOKEN" to the value of sec_key
+ sec_key = ""
+ os.environ["HUGGINGFACEHUB_API_TOKEN"] = sec_key
+
+ # Specify the repository IDs of the Hugging Face models you want to use
+ repo_id_mistral = "mistralai/Mistral-7B-Instruct-v0.3"
+ repo_id_llama3 = "meta-llama/Meta-Llama-3-8B"  # Replace with the actual repo ID for Llama3
+
+ # Streamlit app layout
+ st.title("🤖 AI Query Wizard 🧙")
+
+ # Custom background and styling
+ st.markdown(
+     """
+     <style>
+
+     .stTextInput, .stButton {
+         background-color: rgba(255, 255, 255, 0.8);
+         border-radius: 10px;
+         padding: 10px;
+     }
+     .response {
+         color: #FFD700; /* Gold color for responses */
+         font-weight: bold;
+     }
+     </style>
+     """,
+     unsafe_allow_html=True
+ )
+
+ # Input text area for user query with enhanced instructions
+ user_query = st.text_area(
+     "✨ Enter your magical query:",
+     height=100,
+     help="""
+     **Enhanced Prompting Instructions:**
+     - Be clear and specific about what you want to know.
+     - Use natural language to describe your query.
+     - If asking a question, ensure it is well-formed and unambiguous.
+     - For best results, provide context or background information if relevant.
+     """
+ )
+
+ # Slider for adjusting the temperature
+ temperature = st.slider(
+     "Temperature",
+     min_value=0.1,
+     max_value=1.0,
+     value=0.7,
+     step=0.1,
+     help="""
+     **Temperature:**
+     - Lower values (e.g., 0.1) make the output more deterministic and focused.
+     - Higher values (e.g., 1.0) make the output more diverse and creative.
+     """
+ )
+
+ # Slider for adjusting the max length
+ max_length = st.slider(
+     "Max Length",
+     min_value=32,
+     max_value=256,
+     value=128,
+     step=32,
+     help="""
+     **Max Length:**
+     - Controls the maximum number of tokens in the generated response.
+     - Adjust based on the desired length of the response.
+     """
+ )
+
+ # Button to trigger the query
+ if st.button("🪄 Cast Spell"):
+     if user_query:
+         # Initialize the HuggingFaceEndpoint for Mistral
+         llm_mistral = HuggingFaceEndpoint(
+             repo_id=repo_id_mistral,
+             max_length=max_length,
+             temperature=temperature,
+             token=sec_key
+         )
+
+         # Initialize the HuggingFaceEndpoint for Llama3
+         llm_llama3 = HuggingFaceEndpoint(
+             repo_id=repo_id_llama3,
+             max_length=max_length,
+             temperature=temperature,
+             token=sec_key
+         )
+
+         # Invoke both models with the user's query
+         response_mistral = llm_mistral.invoke(user_query)
+         response_llama3 = llm_llama3.invoke(user_query)
+
+         # Display the responses side by side
+         col1, col2 = st.columns(2)
+
+         with col1:
+             st.markdown("🔮 <span class='response'>Response from Mistral-7B-Instruct-v0.3:</span>", unsafe_allow_html=True)
+             st.markdown(f"<span class='response'>{response_mistral}</span>", unsafe_allow_html=True)
+
+         with col2:
+             st.markdown("🔮 <span class='response'>Response from Llama3:</span>", unsafe_allow_html=True)
+             st.markdown(f"<span class='response'>{response_llama3}</span>", unsafe_allow_html=True)
+
+         # Save query and responses to session state
+         if 'history' not in st.session_state:
+             st.session_state.history = []
+         st.session_state.history.append((user_query, response_mistral, response_llama3))
+     else:
+         st.write("🚨 Please enter a query to cast your spell.")
+
+ # Button to clear history
+ if st.button("🗑️ Clear History"):
+     if 'history' in st.session_state:
+         st.session_state.history = []
+         st.success("History cleared!")
+
+ # Display history of queries and responses
+ if 'history' in st.session_state:
+     st.subheader("📜 Scroll of Spells Cast")
+     for query, response_mistral, response_llama3 in st.session_state.history:
+         st.write(f"**Query:** {query}")
+         col1, col2 = st.columns(2)
+         with col1:
+             st.markdown(f"<span class='response'>**Response from Mistral-7B-Instruct-v0.3:** {response_mistral}</span>", unsafe_allow_html=True)
+         with col2:
+             st.markdown(f"<span class='response'>**Response from Llama3:** {response_llama3}</span>", unsafe_allow_html=True)
+         st.write("---")
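A note on the two HuggingFaceEndpoint constructors in this commit: in recent releases of langchain_huggingface, the declared fields are max_new_tokens and huggingfacehub_api_token, and extra keyword arguments such as max_length and token are typically moved into model_kwargs with a warning rather than applied as generation settings. The sketch below is not part of the commit; it assumes such a library version, and build_endpoint is a hypothetical helper. It shows how the initialization could be written against those fields, with the token read from the environment instead of a hard-coded sec_key. The app itself would still be launched with streamlit run app.py.

import os
from langchain_huggingface import HuggingFaceEndpoint

# Assumes the token is supplied via the environment (e.g. a Space secret)
# rather than an in-source sec_key string.
hf_token = os.environ.get("HUGGINGFACEHUB_API_TOKEN")

def build_endpoint(repo_id: str, temperature: float, max_new_tokens: int) -> HuggingFaceEndpoint:
    # Hypothetical helper: passes the declared fields max_new_tokens /
    # huggingfacehub_api_token instead of max_length / token.
    return HuggingFaceEndpoint(
        repo_id=repo_id,
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        huggingfacehub_api_token=hf_token,
    )

# Example usage mirroring the app's slider defaults (temperature 0.7, length 128):
llm_mistral = build_endpoint("mistralai/Mistral-7B-Instruct-v0.3",
                             temperature=0.7, max_new_tokens=128)
print(llm_mistral.invoke("What is the capital of France?"))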