Upload app.py
app.py
CHANGED
@@ -1,45 +1,35 @@
 from langchain import HuggingFaceHub
-from langchain.schema import
-
+from langchain.schema import AIMessage
 from dotenv import load_dotenv
+import streamlit as st
 
+# Load environment variables
 load_dotenv()
 
-
-
+# Initialize HuggingFace model outside the app
+llm_huggingface = HuggingFaceHub(repo_id="google/flan-t5-large", model_kwargs={"temperature": 0.0, "max_length": 64})
+
+# Streamlit app
 st.set_page_config(page_title="Chatbot")
 st.header('Langchain Application')
 
+# Remove the session_state initialization as it's not being used in this example
 
-
-# Initialization session
-##if 'key' not in st.session_state:
-#    st.session_state['key'] =[
-#    SystemMessage(content='You are AI ')
-#    ]
-
-
-# function to load huggingface model and get response
+# Function to load HuggingFace model and get response
 def get_huggingface_response(question):
-    llm_huggingface=HuggingFaceHub(repo_id="google/flan-t5-large",model_kwargs={"temperature":0.0})
-
-    #st.session_state['key'].append(HumanMessage(content=question))
-    #response=llm_huggingface(st.session_state['key'])
     response = llm_huggingface(question)
-
-    return(response)
-
-
+    return response
 
+# Streamlit input
+user_input = st.text_input("Input: ", key="input")
 
-
-
-## call function
-response=get_huggingface_response(input)
+# Streamlit button
+submit = st.button('Generate')
 
-
-submit=st.button('Generate')
-## click button
+# Check if button is clicked
 if submit:
-
-
+    # Call function to get response
+    response = get_huggingface_response(user_input)
+    # Display response
+    st.subheader("The response is ")
+    st.write(response)