import json
import logging
from app.agent.state import AgentState
from app.agent.prompts import (
SYSTEM_PROMPT,
INITIAL_QUESTIONS_PROMPT,
analyze_round_prompt,
next_questions_prompt,
generate_checklist_prompt,
generate_markdown_prompt,
)
from app.models.question import QuestionOut
from app.models.checklist import ChecklistItem
from app.services.llm import gemini_service
logger = logging.getLogger(__name__)
async def generate_initial_questions(state: AgentState) -> dict:
    """Ask the LLM for the opening round of interview questions.

    Returns a partial state update carrying the parsed questions and the
    round counter initialized to 1.
    """
    logger.info(f"Generating initial questions for session {state['session_id']}")
    raw_items = await gemini_service.generate_json(SYSTEM_PROMPT, INITIAL_QUESTIONS_PROMPT)
    # Parse each raw dict into a validated QuestionOut model.
    parsed = []
    for item in raw_items:
        parsed.append(QuestionOut(id=item["id"], text=item["text"]))
    return {"current_questions": parsed, "current_round": 1}
async def analyze_round(state: AgentState) -> dict:
    """Summarize the just-finished round's answers via the LLM.

    Returns a partial state update with the new summary appended to
    ``round_summaries``.
    """
    round_num = state["current_round"]
    logger.info(f"Analyzing round {round_num} for session {state['session_id']}")
    # Collect only the Q&A pairs that belong to the round being analyzed.
    answers_this_round = [
        ans for ans in state["all_answers"] if ans.round_number == round_num
    ]
    qa_lines = [
        f"В: {ans.question_text}\nО: {ans.audio_transcript}"
        for ans in answers_this_round
    ]
    qa_text = "\n".join(qa_lines)
    prev_summaries = "\n---\n".join(state["round_summaries"])
    result = await gemini_service.generate_json(
        SYSTEM_PROMPT, analyze_round_prompt(round_num, qa_text, prev_summaries)
    )
    # Append without mutating the existing state list.
    return {"round_summaries": [*state["round_summaries"], result["summary"]]}
async def generate_next_questions(state: AgentState) -> dict:
    """Produce the question set for the upcoming round from all round summaries.

    Returns a partial state update with the new questions and the
    incremented round counter.
    """
    next_round = state["current_round"] + 1
    logger.info(f"Generating questions for round {next_round}, session {state['session_id']}")
    all_summaries = "\n---\n".join(state["round_summaries"])
    raw_items = await gemini_service.generate_json(
        SYSTEM_PROMPT, next_questions_prompt(next_round, all_summaries)
    )
    # Parse each raw dict into a validated QuestionOut model.
    parsed = []
    for item in raw_items:
        parsed.append(QuestionOut(id=item["id"], text=item["text"]))
    return {"current_questions": parsed, "current_round": next_round}
async def generate_checklist(state: AgentState) -> dict:
    """Turn all summaries and raw Q&A into the final checklist.

    Returns a partial state update with the parsed checklist items and
    ``is_complete`` set to True, marking the session finished.
    """
    logger.info(f"Generating checklist for session {state['session_id']}")
    all_summaries = "\n---\n".join(state["round_summaries"])
    # Flatten every recorded answer, tagged with its round number.
    qa_chunks = [
        f"Раунд {ans.round_number} — В: {ans.question_text}\nО: {ans.audio_transcript}"
        for ans in state["all_answers"]
    ]
    all_qa = "\n\n".join(qa_chunks)
    raw_items = await gemini_service.generate_json(
        SYSTEM_PROMPT, generate_checklist_prompt(all_summaries, all_qa)
    )
    parsed = []
    for entry in raw_items:
        parsed.append(ChecklistItem(**entry))
    return {"checklist_items": parsed, "is_complete": True}
async def generate_markdown(state: AgentState) -> dict:
    """Render the finished checklist as a markdown document via the LLM.

    Returns a partial state update containing the generated markdown text.
    """
    logger.info(f"Generating markdown for session {state['session_id']}")
    # Serialize the checklist models to pretty-printed JSON for the prompt.
    dumped_items = [item.model_dump() for item in state["checklist_items"]]
    checklist_json = json.dumps(dumped_items, ensure_ascii=False, indent=2)
    prompt = generate_markdown_prompt(checklist_json, state["session_id"])
    markdown = await gemini_service.generate(SYSTEM_PROMPT, prompt)
    return {"markdown_content": markdown}