nbaldwin commited on
Commit
93c74cc
1 Parent(s): 5d8985c

renamed flows to aiflows

Browse files
ControllerAtomicFlow.py CHANGED
@@ -34,17 +34,17 @@ class ControllerAtomicFlow(ChatAtomicFlow):
34
  - `commands` (List[Dict[str,Any]]): A list of commands that the controller can call. Default: []
35
  - `finish` (Dict[str,Any]): The configuration of the finish command. Default parameters: No default parameters.
36
  - `system_message_prompt_template` (Dict[str, Any]): The prompt template used to generate the system message.
37
- By default, it's type is flows.prompt_template.JinjaPrompt. It's default parameters are:
38
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
39
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["commands"]. Note that the commands are the commands of the executor
40
  (subflows of branching flow) and are actually to the system prompt template via the `_build_commands_manual` function of this class.
41
  - `human_message_prompt_template` (Dict[str, Any]): The prompt template of the human/user message (message used everytime the except the first time in).
42
- It's passed as the user message to the LLM. By default its of type flows.prompt_template.JinjaPrompt and has the following parameters:
43
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
44
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["observation"]
45
  - init_human_message_prompt_template` (Dict[str, Any]): The prompt template of the human/user message used to initialize the conversation
46
  (first time in). It is used to generate the human message. It's passed as the user message to the LLM.
47
- By default its of type flows.prompt_template.JinjaPrompt and has the following parameters:
48
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
49
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["goal"]
50
  - All other parameters are inherited from the default configuration of ChatAtomicFlow (see Flowcard, i.e. README.md, of ChatAtomicFlowModule).
 
34
  - `commands` (List[Dict[str,Any]]): A list of commands that the controller can call. Default: []
35
  - `finish` (Dict[str,Any]): The configuration of the finish command. Default parameters: No default parameters.
36
  - `system_message_prompt_template` (Dict[str, Any]): The prompt template used to generate the system message.
37
+ By default, its type is aiflows.prompt_template.JinjaPrompt. Its default parameters are:
38
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
39
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["commands"]. Note that the commands are the commands of the executor
40
  (subflows of branching flow) and are actually passed to the system prompt template via the `_build_commands_manual` function of this class.
41
  - `human_message_prompt_template` (Dict[str, Any]): The prompt template of the human/user message (message used every time except the first time).
42
+ It's passed as the user message to the LLM. By default it's of type aiflows.prompt_template.JinjaPrompt and has the following parameters:
43
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
44
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["observation"]
45
  - `init_human_message_prompt_template` (Dict[str, Any]): The prompt template of the human/user message used to initialize the conversation
46
  (first time in). It is used to generate the human message. It's passed as the user message to the LLM.
47
+ By default it's of type aiflows.prompt_template.JinjaPrompt and has the following parameters:
48
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
49
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["goal"]
50
  - All other parameters are inherited from the default configuration of ChatAtomicFlow (see Flowcard, i.e. README.md, of ChatAtomicFlowModule).
ControllerAtomicFlow.yaml CHANGED
@@ -28,7 +28,7 @@ output_interface:
28
  #######################################################
29
  # ToDo: Some parts of the prompt don't make sense -- update them
30
  system_message_prompt_template:
31
- _target_: flows.prompt_template.JinjaPrompt
32
  template: |2-
33
  You are a smart AI assistant.
34
 
@@ -69,7 +69,7 @@ system_message_prompt_template:
69
  input_variables: ["commands"]
70
 
71
  human_message_prompt_template:
72
- _target_: flows.prompt_template.JinjaPrompt
73
  template: |2-
74
  Here is the response to your last action:
75
  {{observation}}
@@ -77,7 +77,7 @@ human_message_prompt_template:
77
  - "observation"
78
 
79
  init_human_message_prompt_template:
80
- _target_: flows.prompt_template.JinjaPrompt
81
  template: |2-
82
  Here is the goal you need to achieve:
83
  {{goal}}
 
28
  #######################################################
29
  # ToDo: Some parts of the prompt don't make sense -- update them
30
  system_message_prompt_template:
31
+ _target_: aiflows.prompt_template.JinjaPrompt
32
  template: |2-
33
  You are a smart AI assistant.
34
 
 
69
  input_variables: ["commands"]
70
 
71
  human_message_prompt_template:
72
+ _target_: aiflows.prompt_template.JinjaPrompt
73
  template: |2-
74
  Here is the response to your last action:
75
  {{observation}}
 
77
  - "observation"
78
 
79
  init_human_message_prompt_template:
80
+ _target_: aiflows.prompt_template.JinjaPrompt
81
  template: |2-
82
  Here is the goal you need to achieve:
83
  {{goal}}
ControllerExecutorFlow.py CHANGED
@@ -1,7 +1,7 @@
1
  from typing import Dict, Any
2
 
3
- from flows.base_flows import CircularFlow
4
- from flows.utils import logging
5
 
6
  from .ControllerAtomicFlow import ControllerAtomicFlow
7
 
 
1
  from typing import Dict, Any
2
 
3
+ from aiflows.base_flows import CircularFlow
4
+ from aiflows.utils import logging
5
 
6
  from .ControllerAtomicFlow import ControllerAtomicFlow
7
 
ControllerExecutorFlow.yaml CHANGED
@@ -12,7 +12,7 @@ output_interface:
12
  ### Subflows specification
13
  subflows_config:
14
  Controller:
15
- _target_: aiflows.ControllerAtomicFlow.instantiate_from_default_config
16
  finish:
17
  description: "Signal that the objective has been satisfied, and returns the answer to the user."
18
  input_args: ["answer"]
@@ -24,7 +24,7 @@ subflows_config:
24
 
25
 
26
  Executor:
27
- _target_: flows.base_flows.BranchingFlow.instantiate_from_default_config
28
  # E.g.,
29
  # subflows_config:
30
  # wiki_search:
@@ -36,9 +36,9 @@ early_exit_key: "EARLY_EXIT"
36
  topology:
37
  - goal: "Select the next action and prepare the input for the executor."
38
  input_interface:
39
- _target_: flows.interfaces.KeyInterface
40
  additional_transformations:
41
- - _target_: flows.data_transformations.KeyMatchInput
42
  flow: Controller
43
  output_interface:
44
  _target_: ControllerExecutorFlow.detect_finish_or_continue
@@ -46,14 +46,14 @@ topology:
46
 
47
  - goal: "Execute the action specified by the Controller."
48
  input_interface:
49
- _target_: flows.interfaces.KeyInterface
50
  keys_to_rename:
51
  command: branch
52
  command_args: branch_input_data
53
  keys_to_select: ["branch", "branch_input_data"]
54
  flow: Executor
55
  output_interface:
56
- _target_: flows.interfaces.KeyInterface
57
  keys_to_rename:
58
  branch_output_data: observation
59
  keys_to_select: ["observation"]
 
12
  ### Subflows specification
13
  subflows_config:
14
  Controller:
15
+ _target_: flow_modules.aiflows.ControllerAtomicFlow.instantiate_from_default_config
16
  finish:
17
  description: "Signal that the objective has been satisfied, and returns the answer to the user."
18
  input_args: ["answer"]
 
24
 
25
 
26
  Executor:
27
+ _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config
28
  # E.g.,
29
  # subflows_config:
30
  # wiki_search:
 
36
  topology:
37
  - goal: "Select the next action and prepare the input for the executor."
38
  input_interface:
39
+ _target_: aiflows.interfaces.KeyInterface
40
  additional_transformations:
41
+ - _target_: aiflows.data_transformations.KeyMatchInput
42
  flow: Controller
43
  output_interface:
44
  _target_: ControllerExecutorFlow.detect_finish_or_continue
 
46
 
47
  - goal: "Execute the action specified by the Controller."
48
  input_interface:
49
+ _target_: aiflows.interfaces.KeyInterface
50
  keys_to_rename:
51
  command: branch
52
  command_args: branch_input_data
53
  keys_to_select: ["branch", "branch_input_data"]
54
  flow: Executor
55
  output_interface:
56
+ _target_: aiflows.interfaces.KeyInterface
57
  keys_to_rename:
58
  branch_output_data: observation
59
  keys_to_select: ["observation"]
README.md CHANGED
@@ -62,17 +62,17 @@ Default: "Proposes the next action to take towards achieving the goal, and prepa
62
  - `commands` (List[Dict[str,Any]]): A list of commands that the controller can call. Default: []
63
  - `finish` (Dict[str,Any]): The configuration of the finish command. Default parameters: No default parameters.
64
  - `system_message_prompt_template` (Dict[str, Any]): The prompt template used to generate the system message.
65
- By default, it's type is flows.prompt_template.JinjaPrompt. It's default parameters are:
66
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
67
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["commands"]. Note that the commands are the commands of the executor
68
  (subflows of branching flow) and are actually to the system prompt template via the `_build_commands_manual` function of this class.
69
  - `human_message_prompt_template` (Dict[str, Any]): The prompt template of the human/user message (message used everytime the except the first time in).
70
- It's passed as the user message to the LLM. By default its of type flows.prompt_template.JinjaPrompt and has the following parameters:
71
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
72
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["observation"]
73
  - init_human_message_prompt_template` (Dict[str, Any]): The prompt template of the human/user message used to initialize the conversation
74
  (first time in). It is used to generate the human message. It's passed as the user message to the LLM.
75
- By default its of type flows.prompt_template.JinjaPrompt and has the following parameters:
76
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
77
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["goal"]
78
  - All other parameters are inherited from the default configuration of ChatAtomicFlow (see Flowcard, i.e. README.md, of ChatAtomicFlowModule).
 
62
  - `commands` (List[Dict[str,Any]]): A list of commands that the controller can call. Default: []
63
  - `finish` (Dict[str,Any]): The configuration of the finish command. Default parameters: No default parameters.
64
  - `system_message_prompt_template` (Dict[str, Any]): The prompt template used to generate the system message.
65
+ By default, its type is aiflows.prompt_template.JinjaPrompt. Its default parameters are:
66
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
67
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["commands"]. Note that the commands are the commands of the executor
68
  (subflows of branching flow) and are actually passed to the system prompt template via the `_build_commands_manual` function of this class.
69
  - `human_message_prompt_template` (Dict[str, Any]): The prompt template of the human/user message (message used every time except the first time).
70
+ It's passed as the user message to the LLM. By default it's of type aiflows.prompt_template.JinjaPrompt and has the following parameters:
71
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
72
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["observation"]
73
  - `init_human_message_prompt_template` (Dict[str, Any]): The prompt template of the human/user message used to initialize the conversation
74
  (first time in). It is used to generate the human message. It's passed as the user message to the LLM.
75
+ By default it's of type aiflows.prompt_template.JinjaPrompt and has the following parameters:
76
  - `template` (str): The template of the prompt. Default: see ControllerAtomicFlow.yaml for the default template.
77
  - `input_variables` (List[str]): The input variables of the prompt. Default: ["goal"]
78
  - All other parameters are inherited from the default configuration of ChatAtomicFlow (see Flowcard, i.e. README.md, of ChatAtomicFlowModule).
WikiSearchAtomicFlow.py CHANGED
@@ -2,9 +2,9 @@ from copy import deepcopy
2
 
3
  from typing import List, Dict, Optional, Any
4
 
5
- from flows.base_flows import AtomicFlow
6
 
7
- from flows.utils import logging
8
  from .wikipediaAPI import WikipediaAPIWrapper
9
 
10
  log = logging.get_logger(__name__)
 
2
 
3
  from typing import List, Dict, Optional, Any
4
 
5
+ from aiflows.base_flows import AtomicFlow
6
 
7
+ from aiflows.utils import logging
8
  from .wikipediaAPI import WikipediaAPIWrapper
9
 
10
  log = logging.get_logger(__name__)
__init__.py CHANGED
@@ -2,7 +2,7 @@
2
  dependencies = [
3
  {"url": "aiflows/ChatFlowModule", "revision": "main"},
4
  ]
5
- from flows import flow_verse
6
 
7
  flow_verse.sync_dependencies(dependencies)
8
  # ~~~
 
2
  dependencies = [
3
  {"url": "aiflows/ChatFlowModule", "revision": "main"},
4
  ]
5
+ from aiflows import flow_verse
6
 
7
  flow_verse.sync_dependencies(dependencies)
8
  # ~~~
demo.yaml CHANGED
@@ -1,11 +1,11 @@
1
  flow:
2
- _target_: aiflows.ControllerExecutorFlowModule.ControllerExecutorFlow.instantiate_from_default_config
3
  max_rounds: 30
4
 
5
  ### Subflows specification
6
  subflows_config:
7
  Controller:
8
- _target_: aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config
9
  commands:
10
  wiki_search:
11
  description: "Performs a search on Wikipedia."
@@ -14,14 +14,14 @@ flow:
14
  description: "Signal that the objective has been satisfied, and returns the answer to the user."
15
  input_args: [ "answer" ]
16
  backend:
17
- _target_: flows.backends.llm_lite.LiteLLMBackend
18
  api_infos: ???
19
  model_name:
20
  openai: "gpt-3.5-turbo"
21
  azure: "azure/gpt-4"
22
 
23
  Executor:
24
- _target_: flows.base_flows.BranchingFlow.instantiate_from_default_config
25
  subflows_config:
26
  wiki_search:
27
- _target_: aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config
 
1
  flow:
2
+ _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerExecutorFlow.instantiate_from_default_config
3
  max_rounds: 30
4
 
5
  ### Subflows specification
6
  subflows_config:
7
  Controller:
8
+ _target_: flow_modules.aiflows.ControllerExecutorFlowModule.ControllerAtomicFlow.instantiate_from_default_config
9
  commands:
10
  wiki_search:
11
  description: "Performs a search on Wikipedia."
 
14
  description: "Signal that the objective has been satisfied, and returns the answer to the user."
15
  input_args: [ "answer" ]
16
  backend:
17
+ _target_: aiflows.backends.llm_lite.LiteLLMBackend
18
  api_infos: ???
19
  model_name:
20
  openai: "gpt-3.5-turbo"
21
  azure: "azure/gpt-4"
22
 
23
  Executor:
24
+ _target_: aiflows.base_flows.BranchingFlow.instantiate_from_default_config
25
  subflows_config:
26
  wiki_search:
27
+ _target_: flow_modules.aiflows.ControllerExecutorFlowModule.WikiSearchAtomicFlow.instantiate_from_default_config
run.py CHANGED
@@ -2,13 +2,13 @@ import os
2
 
3
  import hydra
4
 
5
- import flows
6
- from flows.flow_launchers import FlowLauncher
7
- from flows.backends.api_info import ApiInfo
8
- from flows.utils.general_helpers import read_yaml_file
9
 
10
- from flows import logging
11
- from flows.flow_cache import CACHING_PARAMETERS, clear_cache
12
 
13
  CACHING_PARAMETERS.do_caching = False # Set to True in order to disable caching
14
  # clear_cache() # Uncomment this line to clear the cache
@@ -18,7 +18,7 @@ logging.set_verbosity_debug()
18
  dependencies = [
19
  {"url": "aiflows/ControllerExecutorFlowModule", "revision": os.getcwd()},
20
  ]
21
- from flows import flow_verse
22
 
23
  flow_verse.sync_dependencies(dependencies)
24
 
 
2
 
3
  import hydra
4
 
5
+ import aiflows
6
+ from aiflows.flow_launchers import FlowLauncher
7
+ from aiflows.backends.api_info import ApiInfo
8
+ from aiflows.utils.general_helpers import read_yaml_file
9
 
10
+ from aiflows import logging
11
+ from aiflows.flow_cache import CACHING_PARAMETERS, clear_cache
12
 
13
  CACHING_PARAMETERS.do_caching = False # Set to True in order to disable caching
14
  # clear_cache() # Uncomment this line to clear the cache
 
18
  dependencies = [
19
  {"url": "aiflows/ControllerExecutorFlowModule", "revision": os.getcwd()},
20
  ]
21
+ from aiflows import flow_verse
22
 
23
  flow_verse.sync_dependencies(dependencies)
24