flow:  # Overrides the ChatWithDemonstrationsFlow config
  _target_: aiflows.ChatWithDemonstrationsFlowModule.ChatWithDemonstrationsFlow.instantiate_from_default_config
  name: "SimpleQA_Flow_with_Demonstrations"
  description: "A sequential flow that answers questions with demonstrations"

  input_interface:  # Connector between the "input data" and the Flow
    - "question"

  output_interface:  # Connector between the Flow's output and the caller
    - "answer"

  subflows_config:
    demonstration_flow:
      data:
        - query_data:
            query: "What is the capital of Turkey?"
          response_data:
            response: "Istanbul, my sir."
        - query_data:
            query: "What is the capital of Germany?"
          response_data:
            response: "Berlin, my sir."
      params:
        data_dir: null
        demonstrations_id: my_sir_demo
      query_prompt_template:
        template: |2-
          Answer the following question: {{query}}
        input_variables:
          - "query"
      response_prompt_template:
        template: |2-
          {{response}}
        input_variables:
          - "response"

    chat_flow:
      name: "SimpleQA_Flow"

      # ~~~ Input interface specification ~~~
      input_interface_non_initialized:
        - "question"

      # ~~~ Backend model parameters ~~~
      backend:
        _target_: aiflows.backends.llm_lite.LiteLLMBackend
        api_infos: ???
        model_name:
          openai: "gpt-3.5-turbo"
          azure: "azure/gpt-4"

        # ~~~ Generation parameters ~~~
        n: 1
        max_tokens: 3000
        temperature: 0.3
        top_p: 0.2
        frequency_penalty: 0
        presence_penalty: 0

      n_api_retries: 6
      wait_time_between_retries: 20

      # ~~~ Prompt specification ~~~
      system_message_prompt_template:
        _target_: aiflows.prompt_template.JinjaPrompt
        template: |2-
          You are a helpful chatbot that truthfully answers questions.
          Answer in a similar way to your previous replies.
        input_variables: []
        partial_variables: {}

      init_human_message_prompt_template:
        _target_: aiflows.prompt_template.JinjaPrompt
        template: |2-
          Answer the following question: {{question}}
        input_variables: ["question"]
        partial_variables: {}
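
# ~~~ Usage (illustrative) ~~~
# A minimal sketch of loading and running this config, assuming the aiflows
# 0.x tutorial API; the entry points (read_yaml_file, FlowLauncher.launch)
# and the file name demo.yaml are assumptions that may differ across versions.
#
#   import os
#   import hydra
#   from aiflows.backends.api_info import ApiInfo
#   from aiflows.flow_launchers import FlowLauncher
#   from aiflows.utils.general_helpers import read_yaml_file
#
#   # Replace the ??? placeholder above with real credentials.
#   api_information = [ApiInfo(backend_used="openai",
#                              api_key=os.getenv("OPENAI_API_KEY"))]
#   cfg = read_yaml_file("demo.yaml")
#   cfg["flow"]["subflows_config"]["chat_flow"]["backend"]["api_infos"] = api_information
#
#   # _target_ points at instantiate_from_default_config, so hydra resolves it.
#   flow = hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial")
#
#   # Run a single question through the flow and print the answer.
#   data = {"id": 0, "question": "What is the capital of France?"}
#   _, outputs = FlowLauncher.launch(
#       flow_with_interfaces={"flow": flow, "input_interface": None, "output_interface": None},
#       data=data,
#       path_to_output_file=None,
#   )
#   print(outputs[0])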