# Lite / reasoning.py
# Provenance (Hugging Face Space file listing, preserved as comments so the
# module parses): author Rivalcoder, commit ba773e9 "Add Files -Update New",
# 633 Bytes, raw / history / blame / contribute / delete.
from transformers import pipeline
# Load lightweight reasoning model
# NOTE(review): instantiating the pipeline at module import time downloads and
# loads google/flan-t5-large once, so every call to generate_reasoning reuses
# the same model instance. This is slow and memory-heavy at import — presumably
# intentional for a long-lived service; confirm if this module is ever imported
# in contexts that do not need the model.
reasoner = pipeline("text2text-generation", model="google/flan-t5-large")
def build_prompt(summary, question):
    """Format the audio summary and a question into one prompt string.

    Internal helper for generate_reasoning; split out so the pure
    prompt-building step can be tested without loading the model.

    Args:
        summary: Mapping with keys 'transcription', 'sound_event',
            'emotion' and 'speakers' describing the audio clip.
        question: The user's question about the audio.

    Returns:
        The prompt text fed to the text2text-generation pipeline.
    """
    # Prompt text is kept byte-identical to the original so model
    # behavior is unchanged.
    return f"""
Audio Summary:
Speech: {summary['transcription']}
Main Sound Event: {summary['sound_event']}
Emotion: {summary['emotion']}
Speakers: {summary['speakers']}
Question: {question}
Provide a detailed reasoning-based answer using the audio cues.
"""


def generate_reasoning(summary, question):
    """Answer *question* about an audio clip with the FLAN-T5 reasoner.

    Args:
        summary: Mapping with keys 'transcription', 'sound_event',
            'emotion' and 'speakers' (see build_prompt).
        question: The user's question about the audio.

    Returns:
        The generated answer text (str) from the pipeline.
    """
    prompt = build_prompt(summary, question)
    # Use only max_new_tokens to avoid the Hugging Face warning about
    # passing both max_length and max_new_tokens.
    result = reasoner(prompt, max_new_tokens=256)[0]["generated_text"]
    return result