---
input_interface:  # Connector between the "input data" and the Flow
  _target_: flows.interfaces.KeyInterface
  additional_transformations:
    # Pass along the input parameters specified by the flow
    - _target_: flows.data_transformations.KeyMatchInput
output_interface:  # Connector between the Flow's output and the caller
  _target_: flows.interfaces.KeyInterface
  keys_to_rename:
    api_output: answer  # Rename the api_output key to answer
flow:  # Overrides the OpenAIChatAtomicFlow default config
  _target_: aiflows.OpenAIChatFlowModule.OpenAIChatAtomicFlow.instantiate_from_default_config
  name: "SimpleQA_Flow"
  description: "A flow that answers questions."

  # ~~~ Input interface specification ~~~
  input_interface_non_initialized:
    - "question"

  # ~~~ OpenAI model parameters ~~~
  model: "gpt-3.5-turbo"
  generation_parameters:
    n: 1
    max_tokens: 3000
    temperature: 0.3
    model_kwargs:
      top_p: 0.2
      frequency_penalty: 0
      presence_penalty: 0
  n_api_retries: 6  # retry attempts for failed API calls
  wait_time_between_retries: 20  # presumably seconds — verify against the flow module

  # ~~~ Prompt specification ~~~
  system_message_prompt_template:
    _target_: langchain.PromptTemplate
    template: |2-
      You are a helpful chatbot that truthfully answers questions.
    input_variables: []
    partial_variables: {}
    template_format: jinja2

  init_human_message_prompt_template:
    _target_: langchain.PromptTemplate
    template: |2-
      Answer the following question: {{question}}
    input_variables: ["question"]
    partial_variables: {}
    template_format: jinja2