import os

import hydra

import flows
from flows.flow_launchers import FlowLauncher
from flows.backends.api_info import ApiInfo
from flows.utils.general_helpers import read_yaml_file

from flows import logging
from flows.flow_cache import CACHING_PARAMETERS, clear_cache

CACHING_PARAMETERS.do_caching = False  # Set to True to enable caching of flow calls
# clear_cache()  # Uncomment to clear the local cache

logging.set_verbosity_debug()

from flows import flow_verse

# ~~~ Load the flow module from the FlowVerse ~~~
# revision=os.getcwd() resolves the module from the local working copy
dependencies = [
    {"url": "aiflows/ChatWithDemonstrationsFlowModule", "revision": os.getcwd()},
]
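# A published revision can be pinned instead of the local copy
# (sketch; "main" is an assumed branch name):
# dependencies = [{"url": "aiflows/ChatWithDemonstrationsFlowModule", "revision": "main"}]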
flow_verse.sync_dependencies(dependencies)

if __name__ == "__main__":
    # ~~~ Set the API information ~~~
    # OpenAI backend; expects the OPENAI_API_KEY environment variable to be set
    api_information = [ApiInfo(backend_used="openai",
                               api_key=os.getenv("OPENAI_API_KEY"))]
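    # A different backend can be swapped in via the same ApiInfo fields
    # (sketch; the exact environment-variable names below are assumptions):
    # api_information = [ApiInfo(backend_used="azure",
    #                            api_base=os.getenv("AZURE_API_BASE"),
    #                            api_key=os.getenv("AZURE_OPENAI_KEY"),
    #                            api_version=os.getenv("AZURE_API_VERSION"))]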

    # ~~~ Load the flow configuration ~~~
    root_dir = "."
    cfg_path = os.path.join(root_dir, "demo.yaml")
    cfg = read_yaml_file(cfg_path)

    # Pass the API information down to the chat subflow's backend
    cfg["flow"]["subflows_config"]["chat_flow"]["backend"]["api_infos"] = api_information

    # ~~~ Instantiate the flow and its (optional) input/output interfaces ~~~
    flow_with_interfaces = {
        "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"),
        "input_interface": (
            None
            if cfg.get("input_interface", None) is None
            else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False)
        ),
        "output_interface": (
            None
            if cfg.get("output_interface", None) is None
            else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False)
        ),
    }
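    # Note on the Hydra flags: _recursive_=False leaves nested subflow configs
    # uninstantiated (the flow instantiates them itself), and _convert_="partial"
    # converts OmegaConf containers to plain dicts/lists (structured configs excepted).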

    # ~~~ Prepare the input data ~~~
    data = {"id": 0, "question": "What's the capital of France?"}

    # Set a path (e.g. "output.jsonl") to also write predictions to disk
    path_to_output_file = None
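    # FlowLauncher.launch also accepts a list of samples for batched runs, e.g.:
    # data = [{"id": 0, "question": "..."}, {"id": 1, "question": "..."}]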

    # ~~~ Run inference ~~~
    _, outputs = FlowLauncher.launch(
        flow_with_interfaces=flow_with_interfaces,
        data=data,
        path_to_output_file=path_to_output_file,
    )

    # ~~~ Print the output of the first (and only) sample ~~~
    flow_output_data = outputs[0]
    print(flow_output_data)