// Manus AI — Fix: Simplify invokeLLM to match the successful example exactly (commit 9733766)
import { ENV } from "./env";
/** Chat roles accepted by OpenAI-compatible chat-completion APIs. */
export type Role = "system" | "user" | "assistant" | "tool" | "function";
/** A single chat message: the speaker's role and its text content. */
export type Message = {
role: Role;
content: string;
};
/** Input to {@link invokeLLM}: the conversation plus an optional model override. */
export type InvokeParams = {
messages: Message[];
// Optional model id; invokeLLM falls back to a default Qwen 2.5 72B variant when omitted.
model?: string;
};
/** OpenAI-style chat-completion response shape (the subset this module declares). */
export type InvokeResult = {
choices: Array<{
message: {
role: Role;
content: string;
};
}>;
};
/**
 * Sends a chat-completion request directly to the Hugging Face Inference API
 * (OpenAI-compatible `/v1/chat/completions` endpoint).
 *
 * @param params - Conversation messages plus an optional model override.
 * @returns The parsed chat-completion response body.
 * @throws Error when no API key is configured, or when the HTTP response is not OK
 *   (the error message includes the status code and response body).
 */
export async function invokeLLM(params: InvokeParams): Promise<InvokeResult> {
  const apiKey = ENV.forgeApiKey || process.env.HF_TOKEN || process.env.HF_ACCESS_TOKEN;
  // Fail fast with an actionable message instead of sending "Authorization: Bearer undefined"
  // and surfacing an opaque upstream 401 to the caller.
  if (!apiKey) {
    throw new Error(
      "LLM invoke failed: no API key configured (set forgeApiKey, HF_TOKEN, or HF_ACCESS_TOKEN)",
    );
  }
  // Model selection: Qwen is the default; any other model (e.g. DeepSeek) may be passed in.
  const model = params.model || "huihui-ai/Qwen2.5-72B-Instruct-abliterated";
  // The direct endpoint URL known to work in the reference example.
  const apiUrl = `https://api-inference.huggingface.co/models/${model}/v1/chat/completions`;
  console.log(`[LLM] Invoking ${model} directly at ${apiUrl}`);
  const response = await fetch(apiUrl, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      model: model,
      messages: params.messages,
      max_tokens: 2048,
      temperature: 0.8,
    }),
  });
  if (!response.ok) {
    const errorText = await response.text();
    console.error(`[LLM Error] Status: ${response.status}, Body: ${errorText}`);
    throw new Error(`LLM invoke failed: ${response.status} - ${errorText}`);
  }
  // NOTE(review): the body is cast without runtime validation — assumes the API
  // returns an OpenAI-style chat-completion object; verify if the schema drifts.
  return (await response.json()) as InvokeResult;
}