VeryMadSoul committed on
Commit df35a1f • 1 Parent(s): 61f9619

Update app.py

Files changed (1)
  1. app.py +58 -59
app.py CHANGED
@@ -1,64 +1,63 @@
import streamlit as st
from hugchat import hugchat
- from hugchat.login import Login
- import os

- # App title
- st.set_page_config(page_title="🤗💬 HugChat")

- # Hugging Face Credentials
with st.sidebar:
-     st.title('🤗💬 HugChat')
-     hf_email = st.text_input('Enter E-mail:', type='password')
-     hf_pass = st.text_input('Enter password:', type='password')
-     if not (hf_email and hf_pass):
-         st.warning('Please enter your credentials!', icon='⚠️')
-     else:
-         st.success('Proceed to entering your prompt message!', icon='👉')
-     st.markdown('📖 Learn how to build this app in this [blog](https://blog.streamlit.io/how-to-build-an-llm-powered-chatbot-with-streamlit/)!')
-
- # Store LLM generated responses
- if "messages" not in st.session_state:
-     st.session_state.messages = [{"role": "assistant", "content": "How may I assist you today?"}]
-
- # Display or clear chat messages
- for message in st.session_state.messages:
-     with st.chat_message(message["role"]):
-         st.write(message["content"])
-
- def clear_chat_history():
-     st.session_state.messages = [{"role": "assistant", "content": "How may I assist you today?"}]
- st.sidebar.button('Clear Chat History', on_click=clear_chat_history)
-
- # Function for generating LLM response
- def generate_response(prompt_input, email, passwd):
-     # Hugging Face Login
-     sign = Login(email, passwd)
-     cookies = sign.login()
-     # Create ChatBot
-     chatbot = hugchat.ChatBot(cookies=cookies.get_dict())
-
-     for dict_message in st.session_state.messages:
-         string_dialogue = "You are a helpful assistant."
-         if dict_message["role"] == "user":
-             string_dialogue += "User: " + dict_message["content"] + "\n\n"
-         else:
-             string_dialogue += "Assistant: " + dict_message["content"] + "\n\n"
-
-     prompt = f"{string_dialogue} {prompt_input} Assistant: "
-     return chatbot.chat(prompt)
-
- # User-provided prompt
- if prompt := st.chat_input(disabled=not (hf_email and hf_pass)):
-     st.session_state.messages.append({"role": "user", "content": prompt})
-     with st.chat_message("user"):
-         st.write(prompt)
-
- # Generate a new response if last message is not from assistant
- if st.session_state.messages[-1]["role"] != "assistant":
-     with st.chat_message("assistant"):
-         with st.spinner("Thinking..."):
-             response = generate_response(prompt, hf_email, hf_pass)
-             st.write(response)
-     message = {"role": "assistant", "content": response}
-     st.session_state.messages.append(message)
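
For context, the removed version authenticates with Hugging Face credentials before chatting. A minimal standalone sketch of that flow, using only the calls visible in the deleted code above (the helper name chat_once is hypothetical, and the hugchat API may have changed since this commit):

    # Sketch of the login-based hugchat flow the removed app.py used.
    # chat_once is a hypothetical helper; hugchat's API may differ in newer releases.
    from hugchat import hugchat
    from hugchat.login import Login

    def chat_once(email, passwd, prompt):
        sign = Login(email, passwd)          # authenticate with HF e-mail/password
        cookies = sign.login()               # returns session cookies
        chatbot = hugchat.ChatBot(cookies=cookies.get_dict())
        return chatbot.chat(prompt)          # single prompt/response round trip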
 
import streamlit as st
+ from streamlit_chat import message
+ from streamlit_extras.colored_header import colored_header
+ from streamlit_extras.add_vertical_space import add_vertical_space
from hugchat import hugchat

+ st.set_page_config(page_title="HugChat - An LLM-powered Streamlit app")

+ # Sidebar contents
with st.sidebar:
+     st.title('🤗💬 HugChat App')
+     st.markdown('''
+     ## About
+     This app is an LLM-powered chatbot built using:
+     - [Streamlit](https://streamlit.io/)
+     - [HugChat](https://github.com/Soulter/hugging-chat-api)
+     - [OpenAssistant/oasst-sft-6-llama-30b-xor](https://huggingface.co/OpenAssistant/oasst-sft-6-llama-30b-xor) LLM model
+
+     💡 Note: No API key required!
+     ''')
+     add_vertical_space(5)
+     st.write('Made with ❤️ by [Data Professor](https://youtube.com/dataprofessor)')
+
+ # Generate empty lists for generated and past.
+ ## generated stores AI generated responses
+ if 'generated' not in st.session_state:
+     st.session_state['generated'] = ["I'm HugChat, How may I help you?"]
+ ## past stores User's questions
+ if 'past' not in st.session_state:
+     st.session_state['past'] = ['Hi!']
+
+ # Layout of input/response containers
+ input_container = st.container()
+ colored_header(label='', description='', color_name='blue-30')
+ response_container = st.container()
+
+ # User input
+ ## Function for taking user provided prompt as input
+ def get_text():
+     input_text = st.text_input("You: ", "", key="input")
+     return input_text
+ ## Applying the user input box
+ with input_container:
+     user_input = get_text()
+
+ # Response output
+ ## Function for taking user prompt as input followed by producing AI generated responses
+ def generate_response(prompt):
+     chatbot = hugchat.ChatBot()
+     response = chatbot.chat(prompt)
+     return response
+
+ ## Conditional display of AI generated responses as a function of user provided prompts
+ with response_container:
+     if user_input:
+         response = generate_response(user_input)
+         st.session_state.past.append(user_input)
+         st.session_state.generated.append(response)
+
+     if st.session_state['generated']:
+         for i in range(len(st.session_state['generated'])):
+             message(st.session_state['past'][i], is_user=True, key=str(i) + '_user')
+             message(st.session_state["generated"][i], key=str(i))
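
The rewritten app keeps two index-aligned lists in session state: past[i] is the user turn that produced generated[i], and both are seeded once so the final loop can render them as paired chat bubbles via streamlit_chat's message(). A small sketch of that pairing outside Streamlit (the record_turn helper and the sample strings are illustrative only, not part of the commit):

    # Sketch of the paired-history pattern used by the new app.py.
    # Seeding both lists once keeps them the same length, so index i always
    # pairs a user prompt with the response it produced.
    past = ['Hi!']
    generated = ["I'm HugChat, How may I help you?"]

    def record_turn(user_input, response):   # illustrative helper, not in app.py
        past.append(user_input)
        generated.append(response)

    record_turn("What is Streamlit?", "(model reply)")
    for user_msg, bot_msg in zip(past, generated):
        print("User:", user_msg)
        print("Bot: ", bot_msg)

Note that only app.py changes in this commit; since the new file imports streamlit_chat and streamlit_extras, the Space also needs those packages available (presumably streamlit-chat and streamlit-extras in requirements.txt, which this commit does not touch).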