# NOTE(review): removed non-code paste artifacts ("Spaces:" / "Runtime error")
# that preceded the module's imports.
import ast

from google.genai import types

from scripts.history.services import (
    load_scenario_history, save_scenario_history,
    load_chat_history, save_chat_history,
    load_reason_history, save_reason_history,
)
from scripts.llm.runner import run_gemini, run_groq
from scripts.routers.services import get_latest_scenario_details
from scripts.services.services import json_to_google_chat
async def add_scenario_route(scenario_id, query, gemini_client, model):
    """Append *query* to the scenario history, ask Gemini to produce a
    scenario definition, persist the updated history, and return it.

    Parameters:
        scenario_id: key used to load/save the scenario history.
        query: the user's request text.
        gemini_client: client handle forwarded to run_gemini.
        model: model identifier forwarded to run_gemini.

    Returns:
        dict: the model reply parsed as a dict, or {} when the reply
        cannot be parsed as a dict literal.
    """
    history: list = await load_scenario_history(scenario_id)
    history.append({'role': 'user', 'content': query})
    contents: list = await json_to_google_chat(history)
    with open('assets/prompts/scenario_creation.md') as system_prompt_file:
        system_prompt: str = system_prompt_file.read()
    generation_config = types.GenerateContentConfig(
        response_mime_type='text/plain',
        system_instruction=[types.Part.from_text(text=system_prompt)]
    )
    response: str = await run_gemini(
        gemini_client,
        contents,
        generation_config,
        model
    )
    # Strip markdown code-fence artifacts ("```json ... ```") before parsing.
    # NOTE(review): this also removes the substring 'json' anywhere in the
    # payload itself — confirm the prompt guarantees it never appears in data.
    cleaned_response: str = response.replace('json', '').replace('`', '')
    # literal_eval accepts single-quoted dicts that json.loads would reject.
    # Catch only the parse errors it actually raises (was a bare `except:`,
    # which also swallowed KeyboardInterrupt/SystemExit), and make sure the
    # parsed value really is a dict before storing it.
    try:
        parsed = ast.literal_eval(cleaned_response)
        dict_response: dict = parsed if isinstance(parsed, dict) else {}
    except (ValueError, SyntaxError):
        dict_response = {}
    history.append({'role': 'assistant', 'content': dict_response})
    await save_scenario_history(history, scenario_id)
    return dict_response
async def ask_reason_route(
    scenario_id,
    session_id,
    query,
    gemini_client,
    model
):
    """Answer a reasoning question about a scenario session using Gemini.

    Loads the reason history, appends the user query, composes a system
    prompt from the scenario details and the session's chat history, runs
    Gemini, records the assistant reply, and saves the updated history.

    Returns the raw assistant reply text.
    """
    history: list = await load_reason_history(scenario_id, session_id)
    history.append({'role': 'user', 'content': query})
    contents: list = await json_to_google_chat(history)

    # Context injected into the system prompt template.
    scenario_prompt: str = await get_latest_scenario_details(scenario_id)
    chat_history: list = await load_chat_history(scenario_id, session_id)
    with open('assets/prompts/scenario_reason.md') as prompt_file:
        prompt_template = prompt_file.read()
    rendered_prompt = prompt_template.format(scenario_prompt, chat_history)

    config = types.GenerateContentConfig(
        response_mime_type='text/plain',
        system_instruction=[types.Part.from_text(text=rendered_prompt)]
    )
    response: str = await run_gemini(gemini_client, contents, config, model)

    history.append({'role': 'assistant', 'content': response})
    await save_reason_history(history, scenario_id, session_id)
    return response
async def ask_route(query, scenario_id, session_id, groq_client) -> str:
    """Run one chat turn for a scenario session against Groq.

    A brand-new session is seeded with the scenario details as the system
    message; the user query and the assistant reply are appended and the
    whole history is persisted.

    Parameters:
        query: the user's message text.
        scenario_id: key for the scenario whose details seed the chat.
        session_id: key for this chat session's history.
        groq_client: client handle forwarded to run_groq.

    Returns:
        str: the assistant's reply text.
    """
    scenario_prompt: str = await get_latest_scenario_details(scenario_id)
    history: list = await load_chat_history(scenario_id, session_id)
    # Idiomatic emptiness test (was `history == []`).
    if not history:
        history = [{'role': 'system', 'content': scenario_prompt}]
    history.append({'role': 'user', 'content': query})
    response: str = await run_groq(history, groq_client)
    history.append({'role': 'assistant', 'content': response})
    # NOTE(review): argument order here is (history, session_id, scenario_id)
    # while load_chat_history takes (scenario_id, session_id) — confirm the
    # save signature really is reversed relative to load.
    await save_chat_history(history, session_id, scenario_id)
    return response
async def get_feedback_route(scenario_id, session_id, groq_client) -> str:
    """Generate feedback on a chat session via Groq.

    Builds a two-message prompt — the feedback system prompt plus a user
    message containing the session history and the scenario's feedback
    questions — runs Groq, and strips any markdown ```json fencing from
    the reply.

    Returns:
        str: the fenced-content portion of the model's reply.
    """
    history: list = await load_chat_history(scenario_id, session_id)
    with open('assets/prompts/feedback.md') as feedback_file:
        system_prompt: str = feedback_file.read()
    questions_for_feedback: list = await get_latest_scenario_details(
        scenario_id, 'questions_for_feedback'
    )
    messages = [
        {
            'role': 'system',
            'content': system_prompt
        },
        {
            'role': 'user',
            # Fixed "Qeustions" typo in the prompt sent to the model.
            'content': f'''
History : {history}
Questions for feedback : {questions_for_feedback}
'''
        }
    ]
    response: str = await run_groq(messages, groq_client)
    # Keep the text after a ```json fence (if any), cut at the next backtick.
    # The kept segment of split('`')[0] cannot contain a backtick, so the
    # original trailing .replace('`', '') was a no-op and is removed.
    response = response.split('```json')[-1].split('`')[0]
    return response