Commit
·
6175374
1
Parent(s):
0e9cb09
First commit.
Browse files- OpenAIChatHumanFlowModule.py +14 -0
- OpenAIChatHumanFlowModule.yaml +63 -0
- README.md +21 -0
- __init__.py +11 -0
- pip_requirements.py +0 -0
- run.py +63 -0
OpenAIChatHumanFlowModule.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from flows.base_flows import CircularFlow
|
| 2 |
+
from flows.utils import logging
|
| 3 |
+
|
| 4 |
+
log = logging.get_logger(__name__)
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class OpenAIChatHumanFlowModule(CircularFlow):
    """A circular Flow that alternates between an OpenAI chat assistant and a human user.

    All orchestration (subflows, topology, early exit) is inherited from
    ``CircularFlow`` and driven by the accompanying YAML default config;
    this class adds no state of its own.
    """

    @classmethod
    def type(cls):
        """Return the flow-module type identifier used by the framework."""
        return "OpenAIChatHumanFlowModule"

    def __init__(self, **kwargs):
        # No module-specific initialization — defer entirely to CircularFlow.
        super().__init__(**kwargs)
|
OpenAIChatHumanFlowModule.yaml
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: "OpenAIChatInteractiveFlow"
description: "Flow for chatting with an OpenAI Flow and a user providing the input."

max_rounds: null # Run until early exit is detected

input_keys: null # Should be specified by the user

output_keys: null # Should be specified by the user

# Two subflows: the LLM assistant and the human providing standard input.
subflows_config:
  Assistant:
    _target_: aiflows.OpenAIChatFlowModule.OpenAIChatAtomicFlow.instantiate_from_default_config
    model_name: "gpt-4"
  User:
    _target_: aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow.instantiate_from_default_config

# Circular topology: each round queries the assistant, then asks the user.
topology:
  - goal: "Query the assistant"

    ### Input Interface
    input_interface:
      _target_: flows.interfaces.KeyInterface
      additional_transformations:
        - _target_: flows.data_transformations.KeyMatchInput

    ### Flow Specification
    flow: Assistant
    reset_every_round: false # Keep the assistant's conversation history across rounds

    ### Output Interface
    output_interface:
      _target_: flows.interfaces.KeyInterface
      additional_transformations:
        - _target_: flows.data_transformations.PrintPreviousMessages

  - goal: "Ask the user for input"

    ### Input Interface
    input_interface:
      _target_: flows.interfaces.KeyInterface
      additional_transformations:
        - _target_: flows.data_transformations.KeyMatchInput

    ### Flow Specification
    flow: User
    reset_every_round: true # The human input flow is stateless between rounds

    ### Output Interface
    output_interface:
      _target_: flows.interfaces.KeyInterface
      additional_transformations:
        # Extract the text between ```answer ... ``` fences from the human input.
        - _target_: flows.data_transformations.RegexFirstOccurrenceExtractor
          regex: '(?<=```answer)([\s\S]*?)(?=```)'
          input_key: "human_input"
          output_key: "answer"
          strip: True
          assert_unique: True
        # Typing "<END>" sets the early-exit flag checked by `early_exit_key` below.
        - _target_: flows.data_transformations.EndOfInteraction
          end_of_interaction_string: "<END>"
          input_key: "human_input"
          output_key: "end_of_interaction"

early_exit_key: "end_of_interaction"
|
README.md
CHANGED
|
@@ -1,3 +1,24 @@
|
|
| 1 |
---
|
| 2 |
license: mit
|
| 3 |
---
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
---
|
| 2 |
license: mit
|
| 3 |
---
|
| 4 |
+
## Description
|
| 5 |
+
|
| 6 |
+
< Flow description >
|
| 7 |
+
|
| 8 |
+
## Configuration parameters
|
| 9 |
+
|
| 10 |
+
< Name 1 > (< Type 1 >): < Description 1 >. Required parameter.
|
| 11 |
+
|
| 12 |
+
< Name 2 > (< Type 2 >): < Description 2 >. Default value is: < value 2 >
|
| 13 |
+
|
| 14 |
+
## Input interface
|
| 15 |
+
|
| 16 |
+
< Name 1 > (< Type 1 >): < Description 1 >.
|
| 17 |
+
|
| 18 |
+
(Note that the interface might depend on the state of the Flow.)
|
| 19 |
+
|
| 20 |
+
## Output interface
|
| 21 |
+
|
| 22 |
+
< Name 1 > (< Type 1 >): < Description 1 >.
|
| 23 |
+
|
| 24 |
+
(Note that the interface might depend on the state of the Flow.)
|
__init__.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ~~~ Specify the dependencies ~~~
# Flow-verse modules this module builds on, pinned to exact revisions
# (the assistant and the human-standard-input subflows used in the YAML config).
dependencies = [
    {"url": "aiflows/OpenAIChatFlowModule", "revision": "6a1e351a915f00193f18f3da3b61c497df1d31a3"},
    {"url": "aiflows/HumanStandardInputFlowModule", "revision": "890e92da1fefbae642fd84296e31bca7f61ea710"},
]
from flows import flow_verse

# Download/sync the pinned dependencies before importing code that relies on them.
flow_verse.sync_dependencies(dependencies)
# ~~~

# Re-export the module's public Flow class.
from .OpenAIChatHumanFlowModule import OpenAIChatHumanFlowModule
|
pip_requirements.py
ADDED
|
File without changes
|
run.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A simple script to run a Flow that can be used for development and debugging."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
|
| 5 |
+
import hydra
|
| 6 |
+
|
| 7 |
+
import flows
|
| 8 |
+
from flows.flow_launchers import FlowLauncher, ApiInfo
|
| 9 |
+
from flows.utils.general_helpers import read_yaml_file
|
| 10 |
+
|
| 11 |
+
from flows import logging
|
| 12 |
+
from flows.flow_cache import CACHING_PARAMETERS, clear_cache
|
| 13 |
+
|
| 14 |
+
CACHING_PARAMETERS.do_caching = False # Set to True to enable caching
|
| 15 |
+
# clear_cache() # Uncomment this line to clear the cache
|
| 16 |
+
|
| 17 |
+
logging.set_verbosity_debug()
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
if __name__ == "__main__":
|
| 21 |
+
# ~~~ Set the API information ~~~
|
| 22 |
+
# OpenAI backend
|
| 23 |
+
# api_information = ApiInfo("openai", os.getenv("OPENAI_API_KEY"))
|
| 24 |
+
# Azure backend
|
| 25 |
+
api_information = ApiInfo("azure", os.getenv("AZURE_OPENAI_KEY"), os.getenv("AZURE_OPENAI_ENDPOINT"))
|
| 26 |
+
|
| 27 |
+
# ~~~ Instantiate the Flow ~~~
|
| 28 |
+
root_dir = "."
|
| 29 |
+
cfg_path = os.path.join(root_dir, "FlowName.yaml")
|
| 30 |
+
cfg = read_yaml_file(cfg_path)
|
| 31 |
+
|
| 32 |
+
flow_with_interfaces = {
|
| 33 |
+
"flow": hydra.utils.instantiate(cfg['flow'], _recursive_=False, _convert_="partial"),
|
| 34 |
+
"input_interface": (
|
| 35 |
+
None
|
| 36 |
+
if getattr(cfg, "input_interface", None) is None
|
| 37 |
+
else hydra.utils.instantiate(cfg['input_interface'], _recursive_=False)
|
| 38 |
+
),
|
| 39 |
+
"output_interface": (
|
| 40 |
+
None
|
| 41 |
+
if getattr(cfg, "output_interface", None) is None
|
| 42 |
+
else hydra.utils.instantiate(cfg['output_interface'], _recursive_=False)
|
| 43 |
+
),
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
# ~~~ Get the data ~~~
|
| 47 |
+
# This can be a list of samples
|
| 48 |
+
data = {"id": 0} # Add your data here
|
| 49 |
+
|
| 50 |
+
# ~~~ Run inference ~~~
|
| 51 |
+
path_to_output_file = None
|
| 52 |
+
# path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk
|
| 53 |
+
|
| 54 |
+
_, outputs = FlowLauncher.launch(
|
| 55 |
+
flow_with_interfaces=flow_with_interfaces,
|
| 56 |
+
data=data,
|
| 57 |
+
path_to_output_file=path_to_output_file,
|
| 58 |
+
api_information=api_information,
|
| 59 |
+
)
|
| 60 |
+
|
| 61 |
+
# ~~~ Print the output ~~~
|
| 62 |
+
flow_output_data = outputs[0]
|
| 63 |
+
print(flow_output_data)
|