nbaldwin committed
Commit f2030ec · 1 Parent(s): 6175374

added demo and readme
ChatHumanFlowModule.py ADDED
@@ -0,0 +1,55 @@
+ from flows.base_flows import CircularFlow
+ from flows.utils import logging
+
+ log = logging.get_logger(f"flows.{__name__}")
+
+
+ class ChatHumanFlowModule(CircularFlow):
+     """This class implements a Chat Human Flow Module: a flow composed of two sub-flows that are executed circularly. It contains the following subflows:
+
+     - A User Flow: a flow that makes queries to the Assistant Flow. E.g. the user asks the assistant (LLM) a question.
+     - An Assistant Flow: a flow that responds to queries made by the User Flow. E.g. the assistant (LLM) answers the user's question.
+
+     To end the interaction, the user must type "\<END\>".
+
+     An illustration of the flow is as follows:
+
+         |------> User Flow -----------> |
+         ^                               |
+         |                               |
+         |                               v
+         |<------ Assistant Flow <-------|
+
+     *Configuration Parameters*:
+
+     - `name` (str): The name of the flow. Default: "ChatHumanFlowModule"
+     - `description` (str): A description of the flow. This description is used to generate the help message of the flow.
+       Default: "Flow that enables chatting between a ChatAtomicFlow and a user providing the input."
+     - `max_rounds` (int): The maximum number of rounds the flow can run for. Default: None, which means that there is no limit on the number of rounds.
+     - `early_exit_key` (str): The key that is used to exit the flow. Default: "end_of_interaction"
+     - `subflows_config` (Dict[str,Any]): A dictionary of subflow configurations. Default:
+         - `Assistant Flow`: The configuration of the Assistant Flow. By default, it is a ChatAtomicFlow; its default parameters are defined in ChatAtomicFlowModule.
+         - `User Flow`: The configuration of the User Flow. By default, it is a HumanStandardInputFlow; its default parameters are defined in HumanStandardInputFlowModule.
+     - `topology` (List[Dict[str,Any]]): The topology of the flow, which is circular.
+       By default, the topology is the one shown in the illustration above (the topology is also described in ChatHumanFlowModule.yaml).
+
+     *Input Interface*:
+
+     - None. By default, the input interface doesn't expect any input.
+
+     *Output Interface*:
+
+     - `end_of_interaction` (bool): Whether the interaction is finished or not.
+
+     :param \**kwargs: Arguments to be passed to the parent class CircularFlow constructor.
+     :type \**kwargs: Dict[str, Any]
+     """
+
+     def __init__(self, **kwargs):
+         super().__init__(**kwargs)
+
+     @classmethod
+     def type(cls):
+         """This method returns the type of the flow."""
+         return "OpenAIChatHumanFlowModule"
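For intuition, the loop below mimics the circular User -> Assistant topology described in the docstring in plain Python, outside the flows framework. This is only an illustrative sketch, not part of the commit: get_user_input and call_assistant are hypothetical stand-ins for the HumanStandardInputFlow and ChatAtomicFlow subflows, and the real module delegates all of this orchestration to CircularFlow.

# Illustrative sketch only: a plain-Python analogue of the circular topology,
# with <END> acting as the early-exit trigger (early_exit_key="end_of_interaction").

def get_user_input(last_reply: str) -> str:
    # Stand-in for the User Flow: show the assistant's last reply, read one line.
    print(last_reply)
    return input("> ")

def call_assistant(query: str) -> str:
    # Stand-in for the Assistant Flow: a real run would query the LLM here.
    return f"(assistant reply to: {query})"

def chat_loop(max_rounds=None):
    api_output = ""   # the assistant's latest reply, fed back to the user each round
    rounds = 0
    while max_rounds is None or rounds < max_rounds:
        human_input = get_user_input(api_output)   # User Flow step
        if "<END>" in human_input:                 # early-exit condition
            return {"end_of_interaction": True}
        api_output = call_assistant(human_input)   # Assistant Flow step
        rounds += 1
    return {"end_of_interaction": False}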
OpenAIChatHumanFlowModule.yaml → ChatHumanFlowModule.yaml RENAMED
@@ -1,16 +1,18 @@
- name: "OpenAIChatInteractiveFlow"
- description: "Flow for chatting with an OpenAI Flow and a user providing the input."
+ name: "ChatInteractiveFlow"
+ description: "Flow that enables chatting between a ChatAtomicFlow and a user providing the input."
  
  max_rounds: null # Run until early exit is detected
  
- input_keys: null # Should be specified by the user
-
- output_keys: null # Should be specified by the user
+ input_interface:
+   _target_: flows.interfaces.KeyInterface
+
+ output_interface:
+   - "end_of_interaction"
+   - "answer"
  
  subflows_config:
    Assistant:
-     _target_: aiflows.OpenAIChatFlowModule.OpenAIChatAtomicFlow.instantiate_from_default_config
-     model_name: "gpt-4"
+     _target_: aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config
    User:
      _target_: aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow.instantiate_from_default_config
  
@@ -48,16 +50,18 @@ topology:
  ### Output Interface
  output_interface:
    _target_: flows.interfaces.KeyInterface
+   keys_to_rename:
+     human_input: query
    additional_transformations:
      - _target_: flows.data_transformations.RegexFirstOccurrenceExtractor
        regex: '(?<=```answer)([\s\S]*?)(?=```)'
-       input_key: "human_input"
+       input_key: "query"
        output_key: "answer"
        strip: True
        assert_unique: True
      - _target_: flows.data_transformations.EndOfInteraction
        end_of_interaction_string: "<END>"
-       input_key: "human_input"
+       input_key: "query"
        output_key: "end_of_interaction"
  
  early_exit_key: "end_of_interaction"
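The two additional_transformations above can be illustrated with nothing but Python's re module. The snippet below is a standalone sketch of what they compute on the renamed "query" key; it is not the flows library code, just the same regex and end-of-interaction string applied by hand (assert_unique is omitted).

import re

# Standalone illustration of the two output transformations in ChatHumanFlowModule.yaml.
ANSWER_REGEX = r'(?<=```answer)([\s\S]*?)(?=```)'   # RegexFirstOccurrenceExtractor
END_STRING = "<END>"                                # EndOfInteraction

def transform(query: str) -> dict:
    match = re.search(ANSWER_REGEX, query)
    answer = match.group(1).strip() if match else None   # strip: True
    return {"answer": answer, "end_of_interaction": END_STRING in query}

print(transform("```answer\n42\n``` thanks"))   # {'answer': '42', 'end_of_interaction': False}
print(transform("<END>"))                       # {'answer': None, 'end_of_interaction': True}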
OpenAIChatHumanFlowModule.py DELETED
@@ -1,14 +0,0 @@
- from flows.base_flows import CircularFlow
- from flows.utils import logging
-
- log = logging.get_logger(__name__)
-
-
- class OpenAIChatHumanFlowModule(CircularFlow):
-
-     def __init__(self, **kwargs):
-         super().__init__(**kwargs)
-
-     @classmethod
-     def type(cls):
-         return "OpenAIChatHumanFlowModule"
README.md DELETED
@@ -1,24 +0,0 @@
- ---
- license: mit
- ---
- ## Description
-
- < Flow description >
-
- ## Configuration parameters
-
- < Name 1 > (< Type 1 >): < Description 1 >. Required parameter.
-
- < Name 2 > (< Type 2 >): < Description 2 >. Default value is: < value 2 >
-
- ## Input interface
-
- < Name 1 > (< Type 1 >): < Description 1 >.
-
- (Note that the interface might depend on the state of the Flow.)
-
- ## Output interface
-
- < Name 1 > (< Type 1 >): < Description 1 >.
-
- (Note that the interface might depend on the state of the Flow.)
__init__.py CHANGED
@@ -1,11 +1,11 @@
  # ~~~ Specify the dependencies ~~~
  dependencies = [
-     {"url": "aiflows/OpenAIChatFlowModule", "revision": "6a1e351a915f00193f18f3da3b61c497df1d31a3"},
-     {"url": "aiflows/HumanStandardInputFlowModule", "revision": "890e92da1fefbae642fd84296e31bca7f61ea710"},
+     {"url": "aiflows/ChatFlowModule", "revision": "a749ad10ed39776ba6721c37d0dc22af49ca0f17"},
+     {"url": "aiflows/HumanStandardInputFlowModule", "revision": "5683a922372c5fa90be9f6447d6662d8d80341fc"},
  ]
  from flows import flow_verse
  
  flow_verse.sync_dependencies(dependencies)
  # ~~~
  
- from .OpenAIChatHumanFlowModule import OpenAIChatHumanFlowModule
+ from .ChatHumanFlowModule import ChatHumanFlowModule
demo.yaml ADDED
@@ -0,0 +1,31 @@
+
+
+ flow:
+   max_rounds: 5
+   _target_: aiflows.ChatInteractiveFlowModule.ChatHumanFlowModule.instantiate_from_default_config
+   subflows_config:
+     Assistant:
+       _target_: aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config
+       backend:
+         _target_: flows.backends.llm_lite.LiteLLMBackend
+         api_infos: ???
+         model_name:
+           openai: "gpt-4"
+           azure: "azure/gpt-4"
+       input_interface_non_initialized: []
+
+     User:
+       _target_: aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow.instantiate_from_default_config
+       request_multi_line_input_flag: False
+       query_message_prompt_template:
+         _target_: flows.prompt_template.JinjaPrompt
+         template: |2-
+           {{api_output}}
+
+           To end an Interaction, type <END> and press enter.
+
+         input_variables: ["api_output"]
+       input_interface:
+         - "api_output"
+       output_interface:
+         - "human_input"
run.py CHANGED
@@ -5,7 +5,8 @@ import os
  import hydra
  
  import flows
- from flows.flow_launchers import FlowLauncher, ApiInfo
+ from flows.flow_launchers import FlowLauncher
+ from flows.backends.api_info import ApiInfo
  from flows.utils.general_helpers import read_yaml_file
  
  from flows import logging
@@ -16,36 +17,49 @@ CACHING_PARAMETERS.do_caching = False # Set to True to enable caching
  
  logging.set_verbosity_debug()
  
+ dependencies = [
+     {"url": "aiflows/ChatInteractiveFlowModule", "revision": os.getcwd()},
+ ]
+ from flows import flow_verse
+ flow_verse.sync_dependencies(dependencies)
  
  if __name__ == "__main__":
      # ~~~ Set the API information ~~~
      # OpenAI backend
-     # api_information = ApiInfo("openai", os.getenv("OPENAI_API_KEY"))
-     # Azure backend
-     api_information = ApiInfo("azure", os.getenv("AZURE_OPENAI_KEY"), os.getenv("AZURE_OPENAI_ENDPOINT"))
  
-     # ~~~ Instantiate the Flow ~~~
+     api_information = [ApiInfo(backend_used="openai",
+                                api_key=os.getenv("OPENAI_API_KEY"))]
+
+     # # Azure backend
+     # api_information = ApiInfo(backend_used="azure",
+     #                           api_base=os.getenv("AZURE_API_BASE"),
+     #                           api_key=os.getenv("AZURE_OPENAI_KEY"),
+     #                           api_version=os.getenv("AZURE_API_VERSION"))
+
      root_dir = "."
-     cfg_path = os.path.join(root_dir, "FlowName.yaml")
+     cfg_path = os.path.join(root_dir, "demo.yaml")
      cfg = read_yaml_file(cfg_path)
-
+
+     cfg["flow"]["subflows_config"]["Assistant"]["backend"]["api_infos"] = api_information
+
+     # ~~~ Instantiate the Flow ~~~
      flow_with_interfaces = {
          "flow": hydra.utils.instantiate(cfg['flow'], _recursive_=False, _convert_="partial"),
          "input_interface": (
              None
-             if getattr(cfg, "input_interface", None) is None
+             if cfg.get("input_interface", None) is None
              else hydra.utils.instantiate(cfg['input_interface'], _recursive_=False)
          ),
          "output_interface": (
              None
-             if getattr(cfg, "output_interface", None) is None
+             if cfg.get("output_interface", None) is None
              else hydra.utils.instantiate(cfg['output_interface'], _recursive_=False)
          ),
      }
-
      # ~~~ Get the data ~~~
-     # This can be a list of samples
-     data = {"id": 0}  # Add your data here
+     data = {"id": 0, "question": "Generate a short random sentence"}  # This can be a list of samples
+     # data = {"id": 0, "question": "Who was the NBA champion in 2023?"}  # This can be a list of samples
  
      # ~~~ Run inference ~~~
      path_to_output_file = None
@@ -54,10 +68,10 @@ if __name__ == "__main__":
      _, outputs = FlowLauncher.launch(
          flow_with_interfaces=flow_with_interfaces,
          data=data,
-         path_to_output_file=path_to_output_file,
-         api_information=api_information,
+         path_to_output_file=path_to_output_file
      )
  
      # ~~~ Print the output ~~~
      flow_output_data = outputs[0]
      print(flow_output_data)
+
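To switch the demo to the Azure backend, the commented-out ApiInfo branch in run.py would be used instead of the OpenAI one. The sketch below is an assumed usage pattern derived only from that comment; the keyword names come from the diff itself, and wrapping the ApiInfo in a list mirrors how the OpenAI branch feeds api_infos.

import os
from flows.backends.api_info import ApiInfo

# Assumed sketch: Azure variant of the api_information setup in run.py above.
api_information = [ApiInfo(backend_used="azure",
                           api_base=os.getenv("AZURE_API_BASE"),
                           api_key=os.getenv("AZURE_OPENAI_KEY"),
                           api_version=os.getenv("AZURE_API_VERSION"))]

# With OPENAI_API_KEY (or the AZURE_* variables) exported, the demo is launched with:
#     python run.py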