File size: 2,055 Bytes
491e164 5b999e7 459ca8d 5b999e7 459ca8d 5b999e7 459ca8d 491e164 1bb4abc 491e164 c3abb01 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 |
import json
from copy import deepcopy
from typing import Any, Dict
from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow
class PlanGeneratorAtomicFlow(ChatAtomicFlow):
    """Wraps the Chat API to generate a step-by-step plan from a goal.

    *Input Interface Non Initialized*:
    - `goal`

    *Input Interface Initialized*:
    - `goal`

    *Output Interface*:
    - `plan`
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Appended to the incoming goal so the model answers with JSON that
        # json.loads can parse directly. The example object is valid JSON
        # (no trailing comma) so the model does not imitate broken syntax.
        self.hint_for_model = """
        Make sure your response is in the following format:
        Response Format:
        {
            "plan": "A step-by-step plan to finish the given goal, each step of plan should contain full information about writing a function"
        }
        """

    @classmethod
    def instantiate_from_config(cls, config):
        """Build a flow instance from a config dict.

        The config is deep-copied so prompt/backend setup cannot mutate
        the caller's dictionary.
        """
        flow_config = deepcopy(config)
        kwargs = {"flow_config": flow_config}
        # ~~~ Set up prompts ~~~
        kwargs.update(cls._set_up_prompts(flow_config))
        # ~~~ Set up backend ~~~
        kwargs.update(cls._set_up_backend(flow_config))
        # ~~~ Instantiate flow ~~~
        return cls(**kwargs)

    def _update_prompts_and_input(self, input_data: Dict[str, Any]) -> None:
        """Append the JSON-format hint to ``input_data['goal']`` in place.

        No-op when the `goal` key is absent.
        """
        if 'goal' in input_data:
            input_data['goal'] += self.hint_for_model

    def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]:
        """Query the model repeatedly until it returns JSON-parsable output.

        :param input_data: expected to contain `goal`; a shallow copy is
            modified internally, so the caller's dict is left untouched.
        :return: the parsed JSON response (expected to contain `plan`).
        """
        # Copy first: _update_prompts_and_input mutates its argument, and the
        # caller's dict must not have the hint appended to its `goal`.
        input_data = dict(input_data)
        self._update_prompts_and_input(input_data)
        while True:  # retries indefinitely until the reply parses as JSON
            api_output = super().run(input_data)["api_output"].strip()
            try:
                return json.loads(api_output)
            # json.JSONDecodeError is the same class as
            # json.decoder.JSONDecodeError — one catch suffices.
            except json.JSONDecodeError:
                # Replace the goal with a correction request and retry.
                new_goal = "The previous response cannot be parsed with json.loads. Next time, do not provide any comments or code blocks. Make sure your next response is purely json parsable."
                new_input_data = input_data.copy()
                new_input_data['goal'] = new_goal
                input_data = new_input_data