Spaces · michal committed "one input"
Commit: 2bc248f · 1 Parent(s): 269dd40

dumb-friendly.py  CHANGED  (+29 -14)
@@ -1,4 +1,5 @@
 
+import ipdb
 
 from langchain import OpenAI, ConversationChain, LLMChain, PromptTemplate
 from langchain.chains.conversation.memory import ConversationalBufferWindowMemory
@@ -20,7 +21,14 @@ Assistant is designed to talk to {name_of_person} and answer their questions.
 {name_of_person} has brown hair.
 """
 
-template = """Assistant is a large language model trained by OpenAI.
+filename = "summary-of-benefits-paragraphs.txt"
+with open(filename) as f:
+    document_text = f.read()
+len_doc = len(document_text )
+print("len doc is ", len_doc)
+document_text = document_text[:(len_doc//2)]
+
+base_template = f"""Assistant is a large language model trained by OpenAI.
 
 {person_details}
 
@@ -30,12 +38,18 @@ template = """Assistant is a large language model trained by OpenAI.
 
 Assistant is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.
 
-{name_of_person}: {human_input}
+{name_of_person}:"""
+template = base_template + """ {human_input}
 Assistant:
 """
 
 prompt = PromptTemplate(
-    input_variables=["human_input", "real_person_prompt", "document_text", "person_details", "name_of_person"],
+    input_variables=["human_input",
+                     # "real_person_prompt",
+                     # "document_text",
+                     # "person_details",
+                     # "name_of_person",
+                     ],
     template=template
 )
@@ -46,18 +60,19 @@ chatgpt_chain = LLMChain(
     memory=ConversationalBufferWindowMemory(k=3),
 )
 
-filename = "summary-of-benefits-paragraphs.txt"
-with open(filename) as f:
-    document_text = f.read()
 
 human_input = "Hi my name is Alfred Jamesmanson. I need your help Assistant. What color is my hair?"
 
-output = chatgpt_chain.predict(
-    name_of_person=name_of_person,
-    person_details=person_details,
-    real_person_prompt=real_person_prompt,
-    document_text=document_text,
-    human_input=human_input,
-)
 
-
+while True:
+    human_input = input(": ")
+    with ipdb.launch_ipdb_on_exception():
+        output = chatgpt_chain.predict(
+            # name_of_person=name_of_person,
+            # person_details=person_details,
+            # real_person_prompt=real_person_prompt,
+            # document_text=document_text,
+            human_input=human_input,
+        )
+
+    print(output)
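
For context, below is a minimal sketch of what dumb-friendly.py roughly looks like after this commit. Only the lines that appear in the diff are taken from it directly (the ipdb import, the document loading and halving, the f-string base_template, the single-variable PromptTemplate, and the input()/ipdb REPL loop). Everything else is an assumption: the name_of_person / person_details definitions, the OpenAI and LLMChain arguments other than memory, and the middle of the prompt text (which presumably interpolates real_person_prompt and document_text) are not shown in the diff, so those parts are placeholders rather than the Space's actual code.

# Sketch of dumb-friendly.py after commit 2bc248f ("one input").
# Lines marked "assumed" are not visible in the diff; they are placeholders only.

import ipdb

from langchain import OpenAI, ConversationChain, LLMChain, PromptTemplate
from langchain.chains.conversation.memory import ConversationalBufferWindowMemory

# Assumed: these are defined in the unchanged lines near the top of the file.
name_of_person = "Alfred Jamesmanson"                  # assumed placeholder
person_details = f"{name_of_person} has brown hair."   # assumed placeholder

# From the diff: read the source document and keep only its first half
# (presumably to stay within the model's context window).
filename = "summary-of-benefits-paragraphs.txt"
with open(filename) as f:
    document_text = f.read()
len_doc = len(document_text)
print("len doc is ", len_doc)
document_text = document_text[:(len_doc // 2)]

# From the diff: everything except {human_input} is baked in via an f-string,
# so the prompt ends up with a single template variable.
base_template = f"""Assistant is a large language model trained by OpenAI.

{person_details}

Assistant is able to process and understand large amounts of text, and can use
this knowledge to provide accurate and informative responses to a wide range of
questions.

{name_of_person}:"""
template = base_template + """ {human_input}
Assistant:
"""

prompt = PromptTemplate(
    input_variables=["human_input"],
    template=template,
)

chatgpt_chain = LLMChain(
    llm=OpenAI(temperature=0),                      # assumed; not shown in the diff
    prompt=prompt,                                  # assumed; not shown in the diff
    verbose=True,                                   # assumed; not shown in the diff
    memory=ConversationalBufferWindowMemory(k=3),   # from the diff
)

human_input = "Hi my name is Alfred Jamesmanson. I need your help Assistant. What color is my hair?"

# From the diff: a simple REPL; ipdb opens a debugger if predict() raises.
while True:
    human_input = input(": ")
    with ipdb.launch_ipdb_on_exception():
        output = chatgpt_chain.predict(human_input=human_input)

    print(output)

The point of the change, matching the commit message "one input", is that everything except {human_input} is interpolated into base_template once at module load, so PromptTemplate keeps a single input variable and chatgpt_chain.predict() can be called with just human_input.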