VeryMadSoul committed on
Commit
48bdd01
β€’
1 Parent(s): e3bcff7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +64 -58
app.py CHANGED
@@ -1,63 +1,69 @@
1
  import streamlit as st
2
- from streamlit_chat import message
3
- from streamlit_extras.colored_header import colored_header
4
- from streamlit_extras.add_vertical_space import add_vertical_space
5
  from hugchat import hugchat
 
 
6
 
7
- st.set_page_config(page_title="HugChat - An LLM-powered Streamlit app")
 
8
 
9
- # Sidebar contents
10
  with st.sidebar:
11
- st.title('πŸ€—πŸ’¬ HugChat App')
12
- st.markdown('''
13
- ## About
14
- This app is an LLM-powered chatbot built using:
15
- - [Streamlit](https://streamlit.io/)
16
- - [HugChat](https://github.com/Soulter/hugging-chat-api)
17
- - [OpenAssistant/oasst-sft-6-llama-30b-xor](https://huggingface.co/OpenAssistant/oasst-sft-6-llama-30b-xor) LLM model
18
-
19
- πŸ’‘ Note: No API key required!
20
- ''')
21
- add_vertical_space(5)
22
- st.write('Made with ❀️ by [Data Professor](https://youtube.com/dataprofessor)')
23
-
24
- # Generate empty lists for generated and past.
25
- ## generated stores AI generated responses
26
- if 'generated' not in st.session_state:
27
- st.session_state['generated'] = ["I'm HugChat, How may I help you?"]
28
- ## past stores User's questions
29
- if 'past' not in st.session_state:
30
- st.session_state['past'] = ['Hi!']
31
-
32
- # Layout of input/response containers
33
- input_container = st.container()
34
- colored_header(label='', description='', color_name='blue-30')
35
- response_container = st.container()
36
-
37
- # User input
38
- ## Function for taking user provided prompt as input
39
- def get_text():
40
- input_text = st.text_input("You: ", "", key="input")
41
- return input_text
42
- ## Applying the user input box
43
- with input_container:
44
- user_input = get_text()
45
-
46
- # Response output
47
- ## Function for taking user prompt as input followed by producing AI generated responses
48
- def generate_response(prompt):
49
- chatbot = hugchat.ChatBot()
50
- response = chatbot.chat(prompt)
51
- return response
52
-
53
- ## Conditional display of AI generated responses as a function of user provided prompts
54
- with response_container:
55
- if user_input:
56
- response = generate_response(user_input)
57
- st.session_state.past.append(user_input)
58
- st.session_state.generated.append(response)
59
-
60
- if st.session_state['generated']:
61
- for i in range(len(st.session_state['generated'])):
62
- message(st.session_state['past'][i], is_user=True, key=str(i) + '_user')
63
- message(st.session_state["generated"][i], key=str(i))
 
 
 
 
 
 
 
1
  import streamlit as st
 
 
 
2
  from hugchat import hugchat
3
+ from hugchat.login import Login
4
+ import os
5
 
6
+ # App title
7
+ st.set_page_config(page_title="πŸ€—πŸ’¬ HugChat")
8
 
9
+ # Hugging Face Credentials
10
  with st.sidebar:
11
+ st.title('πŸ€—πŸ’¬ HugChat')
12
+ if ('EMAIL' in st.secrets) and ('PASS' in st.secrets):
13
+ st.success('HuggingFace Login credentials already provided!', icon='βœ…')
14
+ hf_email = st.secrets['EMAIL']
15
+ hf_pass = st.secrets['PASS']
16
+ else:
17
+ hf_email = st.text_input('Enter E-mail:', type='password')
18
+ hf_pass = st.text_input('Enter password:', type='password')
19
+ if not (hf_email and hf_pass):
20
+ st.warning('Please enter your credentials!', icon='⚠️')
21
+ else:
22
+ st.success('Proceed to entering your prompt message!', icon='πŸ‘‰')
23
+ st.markdown('πŸ“– Learn how to build this app in this [blog](https://blog.streamlit.io/how-to-build-an-llm-powered-chatbot-with-streamlit/)!')
24
+
25
+ # Store LLM generated responses
26
+ if "messages" not in st.session_state:
27
+ st.session_state.messages = [{"role": "assistant", "content": "How may I assist you today?"}]
28
+
29
+ # Display or clear chat messages
30
+ for message in st.session_state.messages:
31
+ with st.chat_message(message["role"]):
32
+ st.write(message["content"])
33
+
34
+ def clear_chat_history():
35
+ st.session_state.messages = [{"role": "assistant", "content": "How may I assist you today?"}]
36
+ st.sidebar.button('Clear Chat History', on_click=clear_chat_history)
37
+
38
+ # Function for generating LLM response
39
+ def generate_response(prompt_input, email, passwd):
40
+ # Hugging Face Login
41
+ sign = Login(email, passwd)
42
+ cookies = sign.login()
43
+ # Create ChatBot
44
+ chatbot = hugchat.ChatBot(cookies=cookies.get_dict())
45
+
46
+ for dict_message in st.session_state.messages:
47
+ string_dialogue = "You are a helpful assistant."
48
+ if dict_message["role"] == "user":
49
+ string_dialogue += "User: " + dict_message["content"] + "\n\n"
50
+ else:
51
+ string_dialogue += "Assistant: " + dict_message["content"] + "\n\n"
52
+
53
+ prompt = f"{string_dialogue} {prompt_input} Assistant: "
54
+ return chatbot.chat(prompt)
55
+
56
+ # User-provided prompt
57
+ if prompt := st.chat_input(disabled=not (hf_email and hf_pass)):
58
+ st.session_state.messages.append({"role": "user", "content": prompt})
59
+ with st.chat_message("user"):
60
+ st.write(prompt)
61
+
62
+ # Generate a new response if last message is not from assistant
63
+ if st.session_state.messages[-1]["role"] != "assistant":
64
+ with st.chat_message("assistant"):
65
+ with st.spinner("Thinking..."):
66
+ response = generate_response(prompt, hf_email, hf_pass)
67
+ st.write(response)
68
+ message = {"role": "assistant", "content": response}
69
+ st.session_state.messages.append(message)