# First version of the FunSearch demo (nbaldwin, commit 97e363b)
import os
import hydra
import aiflows
from aiflows.backends.api_info import ApiInfo
from aiflows.utils.general_helpers import read_yaml_file, quick_load_api_keys
from aiflows import logging
from aiflows.flow_cache import CACHING_PARAMETERS, clear_cache
from aiflows.utils import serving
from aiflows.workers import run_dispatch_worker_thread
from aiflows.messages import FlowMessage
from aiflows.interfaces import KeyInterface
from aiflows.utils.colink_utils import start_colink_server
from aiflows import flow_verse
# Sync the FunSearch flow module from the local checkout one directory up
# (the "revision" is an absolute filesystem path, not a git revision).
dependencies = [{"url": "aiflows/FunSearchFlowModule", "revision": os.path.abspath("../")}]
flow_verse.sync_dependencies(dependencies)

# Verbose logging so the demo's message round-trips are visible.
logging.set_verbosity_debug()
if __name__ == "__main__":
    # Demo driver: serve a ProgramDBFlow over a local CoLink server, then
    # exercise three of its operations: register_program, get_prompt, and
    # get_best_programs_per_island.
    cl = start_colink_server()

    # Serve the flow at the "ProgramDBFlow" endpoint and start a worker
    # thread that dispatches incoming messages to it.
    serving.recursive_serve_flow(
        cl=cl,
        flow_class_name="flow_modules.aiflows.FunSearchFlowModule.ProgramDBFlow",
        flow_endpoint="ProgramDBFlow",
    )
    run_dispatch_worker_thread(cl)

    # Instantiate a proxy to the served flow, configured from demo.yaml
    # in the current working directory.
    overrides = read_yaml_file(os.path.join(".", "demo.yaml"))
    proxy = serving.get_flow_instance(
        cl=cl,
        flow_endpoint="ProgramDBFlow",
        config_overrides=overrides,
    )

    # One candidate program plus its score/feedback on four tests
    # (test_4 uses -1 to flag an invalid prediction format).
    solution_source = 'def solve_function(input) -> str:\n """Attempt at solving the problem given the input input and returns the predicted output (see the top of the file for problem description)"""\n return \'YES\'\n'
    register_payload = {
        "operation": "register_program",
        "content": {
            "artifact": solution_source,
            "scores_per_test": {
                "test_1": {"score": 1.0, "feedback": "No feedback available."},
                "test_2": {"score": 1.0, "feedback": "No feedback available."},
                "test_3": {"score": 0.0, "feedback": "No feedback available."},
                "test_4": {"score": -1, "feedback": "Invalid Format of prediction"},
            },
        },
    }
    # Fire-and-forget: registration needs no reply.
    proxy.send_message(proxy.package_input_message(data=register_payload))

    # Ask the program database for an example sampler prompt.
    prompt_request = proxy.package_input_message(
        data={"operation": "get_prompt", "content": {}}
    )
    example_of_prompt = proxy.get_reply_future(prompt_request).get_data()

    # Fetch the best program recorded on each island.
    best_request = proxy.package_input_message(
        data={"operation": "get_best_programs_per_island", "content": {}}
    )
    best_pg_per_island = proxy.get_reply_future(best_request).get_data()

    print("~~~Example of Prompt~~~")
    print(example_of_prompt)
    print("~~~Best Programs Per Island~~~")
    print(best_pg_per_island)