import { NextResponse } from 'next/server'
import { loadBenchmarks } from '@/lib/benchmarks'
import { buildSystemPrompt, formatMetricsMessage } from '@/lib/prompts'
import { chat } from '@/lib/llm'
import { InterpretationReportSchema, MetricsInputSchema, TeamContextSchema } from '@/lib/schema'

export async function POST(req: Request) {
  try {
    const body = await req.json()

    // Validate both parts of the payload independently so the client
    // receives every validation issue in a single 400 response.
    const metricsResult = MetricsInputSchema.safeParse(body.metrics)
    const contextResult = TeamContextSchema.safeParse(body.context)

    if (!metricsResult.success || !contextResult.success) {
      return NextResponse.json(
        {
          error: 'Invalid input',
          details: {
            metrics: metricsResult.error?.issues,
            context: contextResult.error?.issues,
          },
        },
        { status: 400 }
      )
    }
    // Benchmarks are loaded and injected into the system prompt so the model
    // can interpret the submitted metrics against reference values.
    const benchmarks = await loadBenchmarks()

    const response = await chat({
      system: buildSystemPrompt(benchmarks),
      user: formatMetricsMessage(metricsResult.data, contextResult.data),
      jsonMode: true,
    })

    const content = response.choices[0]?.message?.content
    if (!content) {
      return NextResponse.json({ error: 'No response from LLM' }, { status: 502 })
    }
    // The model may wrap its JSON in markdown fences; extract the outermost
    // object before parsing, and treat a non-JSON reply as an upstream failure.
    let raw: unknown
    try {
      const jsonMatch = content.match(/\{[\s\S]*\}/)
      raw = JSON.parse(jsonMatch ? jsonMatch[0] : content)
    } catch {
      return NextResponse.json({ error: 'LLM returned non-JSON response' }, { status: 502 })
    }

    // Validate the parsed object against the report schema before returning it.
    const reportResult = InterpretationReportSchema.safeParse(raw)
    if (!reportResult.success) {
      return NextResponse.json(
        { error: 'LLM returned invalid report structure', details: reportResult.error.issues },
        { status: 422 }
      )
    }

    return NextResponse.json(reportResult.data)
  } catch (err) {
    console.error('interpret error:', err)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
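
// Example call, for reference only. The route path and the exact field shapes
// inside `metrics` and `context` are assumptions here; they are determined by
// this app's routing and by MetricsInputSchema / TeamContextSchema.
//
//   const res = await fetch('/api/interpret', {
//     method: 'POST',
//     headers: { 'Content-Type': 'application/json' },
//     body: JSON.stringify({ metrics, context }),
//   })
//   if (res.ok) {
//     const report = await res.json() // validated InterpretationReport
//   }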