import { NextResponse } from 'next/server'
import { loadBenchmarks } from '@/lib/benchmarks'
import { buildSystemPrompt, formatMetricsMessage } from '@/lib/prompts'
import { chat } from '@/lib/llm'
import { InterpretationReportSchema, MetricsInputSchema, TeamContextSchema } from '@/lib/schema'
export async function POST(req: Request) {
try {
const body = await req.json()
const metricsResult = MetricsInputSchema.safeParse(body.metrics)
const contextResult = TeamContextSchema.safeParse(body.context)
if (!metricsResult.success || !contextResult.success) {
return NextResponse.json(
{
error: 'Invalid input',
details: {
metrics: metricsResult.error?.issues,
context: contextResult.error?.issues,
},
},
{ status: 400 }
)
}
const benchmarks = await loadBenchmarks()
const response = await chat({
system: buildSystemPrompt(benchmarks),
user: formatMetricsMessage(metricsResult.data, contextResult.data),
jsonMode: true,
})
const content = response.choices[0]?.message?.content
if (!content) {
return NextResponse.json({ error: 'No response from LLM' }, { status: 502 })
}
// Extract JSON if wrapped in markdown fences
const jsonMatch = content.match(/\{[\s\S]*\}/)
const raw = jsonMatch ? JSON.parse(jsonMatch[0]) : JSON.parse(content)
const reportResult = InterpretationReportSchema.safeParse(raw)
if (!reportResult.success) {
return NextResponse.json(
{ error: 'LLM returned invalid report structure', details: reportResult.error.issues },
{ status: 422 }
)
}
return NextResponse.json(reportResult.data)
} catch (err) {
console.error('interpret error:', err)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}