import os

import hydra

import flows
from flows.flow_launchers import FlowLauncher
from flows.backends.api_info import ApiInfo
from flows.utils.general_helpers import read_yaml_file

from flows import logging
from flows.flow_cache import CACHING_PARAMETERS, clear_cache
CACHING_PARAMETERS.do_caching = False  # Set to True to enable caching of flow calls
# clear_cache() # Uncomment this line to clear the cache
logging.set_verbosity_debug()  # Comment out this line to disable verbose logs
from flows import flow_verse
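# FlowVerse dependency sync: sync_dependencies fetches the named flow module.
# Following the pattern in the aiflows examples, passing the current working
# directory as "revision" uses the local copy of the module rather than a
# remote revision.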
dependencies = [
{"url": "aiflows/ChatWithDemonstrationsFlowModule", "revision": os.getcwd()},
]
flow_verse.sync_dependencies(dependencies)
if __name__ == "__main__":
    # ~~~ Set the API information ~~~
    # OpenAI backend
    api_information = [ApiInfo(backend_used="openai",
                               api_key=os.getenv("OPENAI_API_KEY"))]
    # # Azure backend
    # api_information = [ApiInfo(backend_used="azure",
    #                            api_base=os.getenv("AZURE_API_BASE"),
    #                            api_key=os.getenv("AZURE_OPENAI_KEY"),
    #                            api_version=os.getenv("AZURE_API_VERSION"))]
root_dir = "."
cfg_path = os.path.join(root_dir, "demo.yaml")
cfg = read_yaml_file(cfg_path)
cfg["flow"]["subflows_config"]["chat_flow"]["backend"]["api_infos"] = api_information
    # ~~~ Instantiate the Flow ~~~
    flow_with_interfaces = {
        "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"),
        "input_interface": (
            None
            if cfg.get("input_interface", None) is None
            else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False)
        ),
        "output_interface": (
            None
            if cfg.get("output_interface", None) is None
            else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False)
        ),
    }
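    # input_interface / output_interface are optional transformations (e.g. key
    # renaming) applied to the data before it enters and after it leaves the flow;
    # both fall back to None when demo.yaml does not define them.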
    # ~~~ Get the data ~~~
    data = {"id": 0, "question": "What's the capital of France?"}  # This can be a list of samples
    # data = {"id": 0, "question": "Who was the NBA champion in 2023?"}  # This can be a list of samples

    # ~~~ Run inference ~~~
    path_to_output_file = None
    # path_to_output_file = "output.jsonl"  # Uncomment this line to save the output to disk
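    # FlowLauncher.launch returns two values; the second holds one output entry
    # per input sample, so outputs[0] below is the result for the single sample
    # defined above.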
    _, outputs = FlowLauncher.launch(
        flow_with_interfaces=flow_with_interfaces,
        data=data,
        path_to_output_file=path_to_output_file,
    )
    # ~~~ Print the output ~~~
    flow_output_data = outputs[0]
    print(flow_output_data)
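    # Example invocation (the file name run.py is an assumption; use whatever
    # name this script is saved under, with demo.yaml in the same directory):
    #   export OPENAI_API_KEY="sk-..."
    #   python run.py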