Update app.py
app.py
CHANGED
@@ -14,24 +14,24 @@ SERPAPI_API_KEY = os.environ["SERPAPI_API_KEY"]
 openai_client = openai.OpenAI(api_key=OPENAI_API_KEY)
 
 
-tokenizer = AutoTokenizer.from_pretrained("eagle0504/llama-2-7b-miniguanaco")
-model = AutoModelForCausalLM.from_pretrained("eagle0504/llama-2-7b-miniguanaco")
+# tokenizer = AutoTokenizer.from_pretrained("eagle0504/llama-2-7b-miniguanaco")
+# model = AutoModelForCausalLM.from_pretrained("eagle0504/llama-2-7b-miniguanaco")
 
 
-def generate_response_from_llama2(query):
+# def generate_response_from_llama2(query):
 
-    # Tokenize the input text
-    input_ids = tokenizer.encode(query, return_tensors="pt")
+#     # Tokenize the input text
+#     input_ids = tokenizer.encode(query, return_tensors="pt")
 
-    # Generate a response
-    # Adjust the parameters like max_length according to your needs
-    output = model.generate(input_ids, max_length=50, num_return_sequences=1, temperature=0.7)
+#     # Generate a response
+#     # Adjust the parameters like max_length according to your needs
+#     output = model.generate(input_ids, max_length=50, num_return_sequences=1, temperature=0.7)
 
-    # Decode the output to human-readable text
-    generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
+#     # Decode the output to human-readable text
+#     generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
 
-    # output
-    return generated_text
+#     # output
+#     return generated_text
 
 
 # Initialize chat history

@@ -59,7 +59,7 @@ with st.expander("Instructions"):
 
 option = st.sidebar.selectbox(
     "Which task do you want to do?",
-    ("Sentiment Analysis", "Medical Summarization", "
+    ("Sentiment Analysis", "Medical Summarization", "ChatGPT", "ChatGPT (with Google)"),
 )
 
 

@@ -99,10 +99,10 @@ if prompt := st.chat_input("What is up?"):
         if prompt:
             out = pipe_summarization(prompt)
             doc = out[0]["summary_text"]
-    elif option == "Llama2":
-        if prompt:
-            out = generate_response_from_llama2(query=prompt)
-            doc = out
+    # elif option == "Llama2":
+    #     if prompt:
+    #         out = generate_response_from_llama2(query=prompt)
+    #         doc = out
     elif option == "ChatGPT":
         if prompt:
            out = call_chatgpt(query=prompt)
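
For orientation, a minimal sketch of the selectbox-and-dispatch pattern the app is left with after this commit. Only the option tuple, the chat-input prompt, and the names pipe_summarization / call_chatgpt come from the hunks above; the placeholder helper bodies, the omission of the Sentiment Analysis and "ChatGPT (with Google)" branches, and the final st.chat_message display are assumptions for illustration, not the actual app.py.

import streamlit as st

# Hypothetical stand-ins: the real helpers are defined elsewhere in app.py.
def call_chatgpt(query: str) -> str:
    return f"(ChatGPT answer for: {query})"

def pipe_summarization(text: str):
    return [{"summary_text": f"(summary of: {text})"}]

# Sidebar task picker, matching the updated option tuple in the diff.
option = st.sidebar.selectbox(
    "Which task do you want to do?",
    ("Sentiment Analysis", "Medical Summarization", "ChatGPT", "ChatGPT (with Google)"),
)

# Chat loop: dispatch the user's prompt to the selected task.
if prompt := st.chat_input("What is up?"):
    doc = ""
    if option == "Medical Summarization":
        out = pipe_summarization(prompt)
        doc = out[0]["summary_text"]
    elif option == "ChatGPT":
        doc = call_chatgpt(query=prompt)
    # No "Llama2" branch anymore, matching the commented-out code above.
    st.chat_message("assistant").write(doc)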