renamed flow to aiflows

- ChatWithDemonstrationsFlow.py +2 -2
- ChatWithDemonstrationsFlow.yaml +6 -6
- DemonstrationsAtomicFlow.py +4 -4
- DemonstrationsAtomicFlow.yaml +2 -2
- README.md +4 -4
- __init__.py +1 -1
- demo.yaml +4 -4
- run.py +7 -7
ChatWithDemonstrationsFlow.py
CHANGED
@@ -1,7 +1,7 @@
 
 
-from
-from
+from aiflows.base_flows import SequentialFlow
+from aiflows.utils import logging
 
 logging.set_verbosity_debug()
 
ChatWithDemonstrationsFlow.yaml
CHANGED
@@ -3,25 +3,25 @@ description: "A sequential flow that answers questions with demonstrations"
 
 subflows_config:
   demonstration_flow:
-    _target_: aiflows.ChatWithDemonstrationsFlowModule.DemonstrationsAtomicFlow.instantiate_from_default_config
+    _target_: flow_modules.aiflows.ChatWithDemonstrationsFlowModule.DemonstrationsAtomicFlow.instantiate_from_default_config
 
   chat_flow:
-    _target_: aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config
+    _target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config
 
 topology:
 - goal: Get Demonstrations
   input_interface:
-    _target_:
+    _target_: aiflows.interfaces.KeyInterface
   flow: demonstration_flow
   output_interface:
-    _target_:
+    _target_: aiflows.interfaces.KeyInterface
 
 - goal: Answer the question
   input_interface:
-    _target_:
+    _target_: aiflows.interfaces.KeyInterface
   flow: chat_flow
   output_interface:
-    _target_:
+    _target_: aiflows.interfaces.KeyInterface
     keys_to_rename:
      api_output: answer # Rename the api_output to answer
 
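Note: the interfaces above now resolve to `aiflows.interfaces.KeyInterface`. As a rough illustration of what the `keys_to_rename` entry does, the sketch below builds the same output interface directly in Python instead of through the YAML `_target_`; the call on a plain dictionary is an assumption about how `KeyInterface` is applied, not something taken from this diff.

```python
# Sketch only: the output_interface of the "Answer the question" step,
# constructed directly rather than via hydra instantiation of the YAML.
from aiflows.interfaces import KeyInterface

# Mirrors keys_to_rename from the YAML: "api_output" becomes "answer".
output_interface = KeyInterface(keys_to_rename={"api_output": "answer"})

# Assumed usage: applying the interface to an output payload renames the key.
payload = {"api_output": "Paris is the capital of France."}
renamed = output_interface(payload)  # -> {"answer": "Paris is the capital of France."}
```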
DemonstrationsAtomicFlow.py
CHANGED
@@ -1,9 +1,9 @@
 import jinja2
-from
-from
-from
+from aiflows.base_flows import AtomicFlow
+from aiflows.utils import logging
+from aiflows.utils import general_helpers
 from typing import Dict,Any,Optional,List
-from
+from aiflows.prompt_template import JinjaPrompt
 from copy import deepcopy
 import os
 import hydra
DemonstrationsAtomicFlow.yaml
CHANGED
@@ -12,7 +12,7 @@ params:
   demonstrations_k: null
 
   query_prompt_template:
-    _target_:
+    _target_: aiflows.prompt_template.JinjaPrompt
 
   response_prompt_template:
-    _target_:
+    _target_: aiflows.prompt_template.JinjaPrompt
README.md
CHANGED
@@ -48,11 +48,11 @@ Default: No default value this field must be set.
 - `demonstrations_k` (int): The number of demonstrations to pass to the ChatFlow.
   If None, all the demonstrations are passed to the ChatFlow. Default: None
 - `query_prompt_template` (Dict[str, Any]): The prompt template used to generate the query of the demonstrations.
-  By default its of type
-  wants to use the query_prompt_template. Default parameters are defined in
-- `response_prompt_template` (Dict[str, Any]): The prompt template used to generate the response of the demonstrations. By default its of type
+  By default its of type aiflows.prompt_template.JinjaPrompt. None of the parameters of the prompt are defined by default and therefore need to be defined if one
+  wants to use the query_prompt_template. Default parameters are defined in aiflows.prompt_template.jinja2_prompts.JinjaPrompt.
+- `response_prompt_template` (Dict[str, Any]): The prompt template used to generate the response of the demonstrations. By default its of type aiflows.prompt_template.JinjaPrompt.
   None of the parameters of the prompt are defined by default and therefore need to be defined if one
-  wants to use the response_prompt_template. Default parameters are defined in
+  wants to use the response_prompt_template. Default parameters are defined in aiflows.prompt_template.jinja2_prompts.JinjaPrompt.
 
 *Input Interface*:
 
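Since the README only points at the class, here is a minimal sketch of how a `query_prompt_template` could be filled in. Only the `template` and `input_variables` fields are taken from the repository (they appear in demo.yaml below); the template text reuses the question prompt from that file purely as an example.

```python
# Hypothetical configuration of the query prompt template described in the README.
from aiflows.prompt_template import JinjaPrompt

query_prompt_template = JinjaPrompt(
    template="Answer the following question: {{question}}",  # example template
    input_variables=["question"],                             # variables used in the template
)
```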
__init__.py
CHANGED
@@ -2,7 +2,7 @@
 dependencies = [
     {"url": "aiflows/ChatFlowModule", "revision": "main"},
 ]
-from
+from aiflows import flow_verse
 flow_verse.sync_dependencies(dependencies)
 from .ChatWithDemonstrationsFlow import ChatWithDemonstrationsFlow
 from .DemonstrationsAtomicFlow import DemonstrationsAtomicFlow
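For context on why the YAML `_target_` paths gained a `flow_modules.` prefix: `flow_verse.sync_dependencies` fetches the listed modules and, as far as this diff suggests, makes them importable under a local `flow_modules` namespace. The sketch below illustrates that pattern; the final import path is an assumption inferred from the new `_target_` values, not something stated in this commit.

```python
# Sketch (assumed behavior): syncing a FlowVerse dependency and importing it
# through the flow_modules namespace that the renamed _target_ paths point at.
from aiflows import flow_verse

dependencies = [
    {"url": "aiflows/ChatFlowModule", "revision": "main"},  # module pulled from the FlowVerse
]
flow_verse.sync_dependencies(dependencies)

# Assumed import path after syncing; matches the new
# "flow_modules.aiflows.ChatFlowModule..." targets in the YAML files.
from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow
```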
demo.yaml
CHANGED
@@ -1,5 +1,5 @@
 flow: # Overrides the OpenAIChatAtomicFlow config
-  _target_: aiflows.ChatWithDemonstrationsFlowModule.ChatWithDemonstrationsFlow.instantiate_from_default_config
+  _target_: flow_modules.aiflows.ChatWithDemonstrationsFlowModule.ChatWithDemonstrationsFlow.instantiate_from_default_config
   name: "SimpleQA_Flow_with_Demonstrations"
   description: "A sequential flow that answers questions with demonstrations"
 
@@ -41,7 +41,7 @@ flow: # Overrides the OpenAIChatAtomicFlow config
 
   # ~~~ backend model parameters ~~
   backend:
-    _target_:
+    _target_: aiflows.backends.llm_lite.LiteLLMBackend
     api_infos: ???
     model_name:
       openai: "gpt-3.5-turbo"
@@ -61,7 +61,7 @@ flow: # Overrides the OpenAIChatAtomicFlow config
 
   # ~~~ Prompt specification ~~~
   system_message_prompt_template:
-    _target_:
+    _target_: aiflows.prompt_template.JinjaPrompt
     template: |2-
       You are a helpful chatbot that truthfully answers questions. Answer in a similar way to your previous replies.
     input_variables: []
@@ -69,7 +69,7 @@ flow: # Overrides the OpenAIChatAtomicFlow config
 
 
   init_human_message_prompt_template:
-    _target_:
+    _target_: aiflows.prompt_template.JinjaPrompt
     template: |2-
       Answer the following question: {{question}}
     input_variables: ["question"]
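The `api_infos: ???` placeholder under `backend` is left unresolved in the config. A sketch of how it might be supplied at runtime with the `ApiInfo` class imported in run.py follows; the parameter names are assumptions, not taken from this diff.

```python
# Hypothetical runtime override of the api_infos placeholder above.
import os
from aiflows.backends.api_info import ApiInfo

api_information = [
    ApiInfo(
        backend_used="openai",                # assumed parameter name
        api_key=os.getenv("OPENAI_API_KEY"),  # read the key from the environment
    )
]
```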
run.py
CHANGED
@@ -2,20 +2,20 @@ import os
 
 import hydra
 
-import
-from
-from
-from
+import aiflows
+from aiflows.flow_launchers import FlowLauncher
+from aiflows.backends.api_info import ApiInfo
+from aiflows.utils.general_helpers import read_yaml_file
 
-from
-from
+from aiflows import logging
+from aiflows.flow_cache import CACHING_PARAMETERS, clear_cache
 
 CACHING_PARAMETERS.do_caching = False # Set to True in order to disable caching
 # clear_cache() # Uncomment this line to clear the cache
 
 logging.set_verbosity_debug() # Uncomment this line to see verbose logs
 
-from
+from aiflows import flow_verse
 
 
 dependencies = [
|