ogegadavis254 committed on
Commit
1e8b2e8
·
verified ·
1 Parent(s): f3f1044

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +41 -14
app.py CHANGED
@@ -4,22 +4,19 @@
4
  """
5
 
6
  import streamlit as st
 
7
  import os
8
  import requests
9
  import json
10
 
11
  entire_assistant_response = ""
12
 
13
- def get_streamed_response(message, history):
14
-
15
- all_message = [{
16
- "role": "system",
17
- "content": "You are BibleLearnerAI, a Christian Bible AI, you know the bible more than anything else. You focus on religion, believing in Christianity as truth, and provide relevant Bible verses. When greeted, respond with a religious greeting and introduce yourself. Use MD highlighting to make easyer to read."
18
- }]
19
 
20
  for human, assistant in history:
21
- all_message.append({"role": "user", "content": human })
22
- all_message.append({"role": "assistant", "content":assistant})
23
 
24
  global entire_assistant_response
25
  entire_assistant_response = "" # Reset the entire assistant response
@@ -28,7 +25,7 @@ def get_streamed_response(message, history):
28
 
29
  url = "https://api.together.xyz/v1/chat/completions"
30
  payload = {
31
- "model": "NousResearch/Nous-Hermes-2-Yi-34B",
32
  "temperature": 1.05,
33
  "top_p": 0.9,
34
  "top_k": 50,
@@ -84,15 +81,45 @@ st.title("Simple Chatbot")
84
  if "messages" not in st.session_state:
85
  st.session_state.messages = []
86
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
  # Accept user input
88
- if prompt := st.text_input("You:", key="user_input"):
89
 
90
  # Display user message
91
- with st.spinner("Chatbot is typing..."):
92
  st.session_state.messages.append({"role": "user", "content": prompt})
93
 
94
- # Call BibleLearnerAI model to get response
95
- response_stream = get_streamed_response(prompt, [(m["content"] for m in st.session_state.messages[:-1])])
96
  for response in response_stream:
97
  st.session_state.messages.append({"role": "assistant", "content": response})
98
 
@@ -101,4 +128,4 @@ for message in st.session_state.messages:
101
  if message["role"] == "user":
102
  st.text_input("You:", value=message["content"], disabled=True)
103
  else:
104
- st.text_input("BibleLearnerAI:", value=message["content"], disabled=True)
 
4
  """
5
 
6
  import streamlit as st
7
+ from openai import OpenAI
8
  import os
9
  import requests
10
  import json
11
 
12
  entire_assistant_response = ""
13
 
14
+ def get_streamed_response(message, history, model):
15
+ all_message = []
 
 
 
 
16
 
17
  for human, assistant in history:
18
+ all_message.append({"role": "user", "content": human})
19
+ all_message.append({"role": "assistant", "content": assistant})
20
 
21
  global entire_assistant_response
22
  entire_assistant_response = "" # Reset the entire assistant response
 
25
 
26
  url = "https://api.together.xyz/v1/chat/completions"
27
  payload = {
28
+ "model": model,
29
  "temperature": 1.05,
30
  "top_p": 0.9,
31
  "top_k": 50,
 
81
  if "messages" not in st.session_state:
82
  st.session_state.messages = []
83
 
84
+ # Define available models
85
+ models = {
86
+ "Mistral": "mistralai/Mistral-7B-Instruct-v0.2",
87
+ "Gemma-7B": "google/gemma-7b-it",
88
+ "Gemma-2B": "google/gemma-2b-it",
89
+ "Zephyr-7B-β": "HuggingFaceH4/zephyr-7b-beta",
90
+ "BibleLearnerAI": "NousResearch/Nous-Hermes-2-Yi-34B"
91
+ }
92
+
93
+ # Allow user to select a model
94
+ selected_model = st.sidebar.selectbox("Select Model", list(models.keys()))
95
+
96
+ # Create model description
97
+ st.sidebar.write(f"You're now chatting with **{selected_model}**")
98
+ st.sidebar.markdown("*Generated content may be inaccurate or false.*")
99
+ st.sidebar.markdown("\nLearn how to build this chatbot [here](https://ngebodh.github.io/projects/2024-03-05/).")
100
+ st.sidebar.markdown("\nRun into issues? Try the [back-up](https://huggingface.co/spaces/ngebodh/SimpleChatbot-Backup).")
101
+
102
+ if "prev_option" not in st.session_state:
103
+ st.session_state.prev_option = selected_model
104
+
105
+ if st.session_state.prev_option != selected_model:
106
+ st.session_state.messages = []
107
+ st.session_state.prev_option = selected_model
108
+
109
+ #Pull in the model we want to use
110
+ repo_id = models[selected_model]
111
+
112
+ st.subheader(f'AI - {selected_model}')
113
+
114
  # Accept user input
115
+ if prompt := st.text_input(f"Hi I'm {selected_model}, ask me a question"):
116
 
117
  # Display user message
118
+ with st.spinner("AI is typing..."):
119
  st.session_state.messages.append({"role": "user", "content": prompt})
120
 
121
+ # Call selected model to get response
122
+ response_stream = get_streamed_response(prompt, [(m["content"] for m in st.session_state.messages[:-1])], repo_id)
123
  for response in response_stream:
124
  st.session_state.messages.append({"role": "assistant", "content": response})
125
 
 
128
  if message["role"] == "user":
129
  st.text_input("You:", value=message["content"], disabled=True)
130
  else:
131
+ st.text_input(f"{selected_model}:", value=message["content"], disabled=True)