# NOTE(review): this chunk arrived whitespace-collapsed; the single line below
# holds the imports, module-level singletons, and prompt-template constants
# run together. Kept byte-identical because the trailing string constant
# (BCL_TASK_FORMAT_FOR_EXP_V2) is truncated mid-string at "raw_prompt" — the
# rest of the template lies outside this chunk, so reformatting would guess.
# SECURITY: a live Groq API key is hardcoded in client=Groq(api_key=...).
# Rotate that key immediately and load it from an environment variable
# (e.g. os.environ["GROQ_API_KEY"]) instead of committing it to source.
# NOTE(review): the tail of this line (from "8: #frequent_messages=...") is
# the start of a triple-quoted, commented-out summarization section that is
# closed on the following line — it is dead code, not live logic.
from groq import Groq from jsonschema import validate , ValidationError import json import time from databaseengine import DatabaseEngine de=DatabaseEngine() client=Groq(api_key="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") uniprot_sequence=''' FORMAT FOR retrieve_uniprot_sequence: {{ "operation": "retrieve_uniprot_sequence", "biological_inputs": {{ "gene_symbol": "HER2" }}, }} ''' BCL_TASK_FORMAT_FOR_EXP_V2=""" FORMAT FOR introduce_point_mutation: {{ "operation": "introduce_point_mutation", "biological_inputs": {{ "wildtype_sequence": "", "mutation": "S310F" }}, "depends": "retrieve_uniprot_sequence" }} FORMAT FOR predict_structure: {{ "operation":"predict_structure", "biological_inputs":{{ "sequence_for_structure":"" }} "depends": "domain_determination" }} FORMAT FOR analyze_epitopes: {{ "operation":"analyze_epitopes" "biological_inputs":{{ "structure":"" }} "depends": "predict_structure" }} FORMAT FOR domain_determination: {{ "operation":"domain_determination", "biological_inputs": {{ "sequence":"", }}, "depends":"introduce_point_mutation" }} FORMAT FOR fetch_nanobody_template: {{ "operation":"fetch_nanobody_template", "biological_inputs":{{ "nanobody":"" }}, "depends":"None" }} FORMAT FOR observe_orient_decide_act_loop: {{ "operation": "observe_orient_decide_act_loop", "biological_inputs": {{ "sequence": "", "raw_prompt": " 8: #frequent_messages=g_messages[1:4] g_messages=g_messages[-4:] g_messages.insert(0,actual_preserved_message) ''' filtered_chat_history=[m for m in frequent_messages if m["role"] in {"user", "assistant"}] response=client.chat.completions.create( model="llama-3.3-70b-versatile", messages=[ {"role":"system","content":PROMPT_FOR_SUMMARIZATION()}, {"role":"user","content":f""" CONVERSATION_HISTORY : {filtered_chat_history} """} ], stream=False, max_completion_tokens=5000, ) actual_response=response.choices[0].message.content g_messages.insert(1,{"role":"system","content":f""" Conversation History Summary L
{json.loads(actual_response)} """}) ''' response = client.chat.completions.create( model="llama-3.3-70b-versatile", messages=g_messages, stream=False, max_completion_tokens=5000 ) response_message = response.choices[0].message.content '''----------Chat Response is updated here----------------------''' rm=[{"role":"assistant","content":response_message}] de.Update_Conversation(id,rm) '''-------------------------------------------------------------''' if isinstance(json.loads(response_message), dict) and "decision" in json.loads(response_message): return response_message else: time.sleep(5) constraints=self._BCL_CONSTRAINTS(userinput) print(constraints) BCL_SCHEMA={ "experiments":json.loads(response_message), "constraints_mode":"", "constraints":constraints } return BCL_SCHEMA