|
import os |
|
|
|
import hydra |
|
|
|
import flows |
|
from flows.flow_launchers import FlowLauncher, ApiInfo |
|
from flows.utils.general_helpers import read_yaml_file |
|
|
|
from flows import logging |
|
from flows.flow_cache import CACHING_PARAMETERS, clear_cache |
|
|
|
# Disable flow caching globally so every run re-executes the flow from
# scratch (useful while debugging; re-enable for cheaper repeated runs).
CACHING_PARAMETERS.do_caching = False

# clear_cache() is imported above and can be called here to wipe any
# previously cached results if needed.

# Emit debug-level logs from the flows library.
logging.set_verbosity_debug()

# FlowVerse modules to fetch before running: the local AutoGPT module
# (revision = this working directory) and a pinned remote revision of the
# LangChain tool module.
dependencies = [
    {"url": "aiflows/AutoGPTFlowModule", "revision": os.getcwd()},
    {"url": "aiflows/LCToolFlowModule", "revision": "46dd24ecc3dc4f4f0191e57c202cc7d20e8e7782"},
]

# NOTE(review): imported mid-file, right next to the list it consumes;
# kept here so the sync step reads as one unit with `dependencies`.
from flows import flow_verse

# Download/refresh the declared FlowVerse modules at import time.
flow_verse.sync_dependencies(dependencies)
|
|
|
if __name__ == "__main__": |
|
|
|
|
|
|
|
|
|
api_information = ApiInfo("azure", os.getenv("AZURE_OPENAI_KEY"), os.getenv("AZURE_OPENAI_ENDPOINT")) |
|
|
|
root_dir = "." |
|
cfg_path = os.path.join(root_dir, "AutoGPT.yaml") |
|
cfg = read_yaml_file(cfg_path) |
|
|
|
|
|
flow_with_interfaces = { |
|
"flow": hydra.utils.instantiate(cfg['flow'], _recursive_=False, _convert_="partial"), |
|
"input_interface": ( |
|
None |
|
if getattr(cfg, "input_interface", None) is None |
|
else hydra.utils.instantiate(cfg['input_interface'], _recursive_=False) |
|
), |
|
"output_interface": ( |
|
None |
|
if getattr(cfg, "output_interface", None) is None |
|
else hydra.utils.instantiate(cfg['output_interface'], _recursive_=False) |
|
), |
|
} |
|
|
|
|
|
|
|
|
|
data = {"id": 0, "goal": "Answer the following question: What is the date of birth of Michael Jordan?"} |
|
|
|
|
|
|
|
|
|
|
|
path_to_output_file = None |
|
|
|
|
|
_, outputs = FlowLauncher.launch( |
|
flow_with_interfaces=flow_with_interfaces, |
|
data=data, |
|
path_to_output_file=path_to_output_file, |
|
api_information=api_information, |
|
) |
|
|
|
|
|
flow_output_data = outputs[0] |
|
print(flow_output_data) |
|
|