Commit: Getting ready for merge

Files changed (3):
- .gitignore      (+2 −1)
- AutoGPTFlow.yaml (+0 −2)
- run.py           (+1 −8)
.gitignore — CHANGED

```diff
@@ -440,4 +440,5 @@ $RECYCLE.BIN/
 # End of https://www.toptal.com/developers/gitignore/api/python,java,c++,pycharm,visualstudiocode,macos,linux,windows

 .*
-flow_modules/
+flow_modules/
+demo_db_dir/
```

(The `flow_modules/` line is rewritten unchanged in content — likely a whitespace/EOL normalization — and `demo_db_dir/` is newly ignored.)
AutoGPTFlow.yaml — CHANGED

Two `_target_:` keys removed from `subflows_config` (indentation reconstructed from key nesting — original YAML layout not fully visible in the extraction):

```diff
@@ -48,7 +48,6 @@ subflows_config:


   wiki_search:
-    _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config
    name: "proxy WikiSearchAtomicFlow"
    flow_class_name: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow
    description: "A flow that searches Wikipedia for information."
@@ -97,7 +96,6 @@ subflows_config:
      - "observation"

  Memory:
-    _target_: flow_modules.aiflows.VectorStoreFlowModule.ChromaDBFlow.instantiate_from_default_config
    n_results: 2
    flow_class_name: flow_modules.aiflows.VectorStoreFlowModule.ChromaDBFlow
    flow_endpoint: MemoryFlow
```
run.py — CHANGED

The two commented "option1 / option2" paths for building the input message are collapsed into the single `package_input_message` call:

```diff
@@ -83,14 +83,7 @@ if __name__ == "__main__":
        "goal": "Answer the following question: What is the profession and date of birth of Michael Jordan?",
    }

-
-    #option1: use the FlowMessage class
-    input_message = FlowMessage(
-        data=data,
-    )
-
-    #option2: use the proxy_flow
-    #input_message = proxy_flow.package_input_message(data = data)
+    input_message = proxy_flow.package_input_message(data = data)

    #7. ~~~ Run inference ~~~
    future = proxy_flow.get_reply_future(input_message)
```