Spaces:
Sleeping
Sleeping
طيب دعنا ننتقل إلى open router : sk-or-v1-[REDACTED — leaked API key removed; this key was exposed publicly and must be revoked/rotated immediately]
Browse files- src/app/api/ai/route.ts +9 -8
- src/lib/gemini-client.ts +12 -10
src/app/api/ai/route.ts
CHANGED
|
@@ -2,35 +2,36 @@
|
|
| 2 |
import { NextResponse } from 'next/server';
|
| 3 |
|
| 4 |
/**
|
| 5 |
-
* @fileOverview API Route وسيط للتعامل مع
|
| 6 |
*/
|
| 7 |
|
| 8 |
export async function POST(req: Request) {
|
| 9 |
try {
|
| 10 |
const { prompt, model } = await req.json();
|
| 11 |
-
const apiKey = process.env.
|
| 12 |
|
| 13 |
if (!apiKey) {
|
| 14 |
return NextResponse.json({ error: "API Key missing in .env" }, { status: 500 });
|
| 15 |
}
|
| 16 |
|
| 17 |
-
const response = await fetch("https://
|
| 18 |
method: 'POST',
|
| 19 |
headers: {
|
| 20 |
'Content-Type': 'application/json',
|
| 21 |
-
'Authorization': `Bearer ${apiKey.trim()}`
|
|
|
|
|
|
|
| 22 |
},
|
| 23 |
body: JSON.stringify({
|
| 24 |
-
model: model || "gemini-2.0-flash-lite-preview-02-05",
|
| 25 |
messages: [{ role: "user", content: prompt }],
|
| 26 |
-
temperature: 0.7
|
| 27 |
-
max_tokens: 4096
|
| 28 |
})
|
| 29 |
});
|
| 30 |
|
| 31 |
if (!response.ok) {
|
| 32 |
const errorText = await response.text();
|
| 33 |
-
return NextResponse.json({ error: `
|
| 34 |
}
|
| 35 |
|
| 36 |
const data = await response.json();
|
|
|
|
| 2 |
import { NextResponse } from 'next/server';
|
| 3 |
|
| 4 |
/**
|
| 5 |
+
* @fileOverview API Route وسيط للتعامل مع OpenRouter لتجنب مشاكل الشبكة في Server Actions.
|
| 6 |
*/
|
| 7 |
|
| 8 |
export async function POST(req: Request) {
|
| 9 |
try {
|
| 10 |
const { prompt, model } = await req.json();
|
| 11 |
+
const apiKey = process.env.OPENROUTER_API_KEY;
|
| 12 |
|
| 13 |
if (!apiKey) {
|
| 14 |
return NextResponse.json({ error: "API Key missing in .env" }, { status: 500 });
|
| 15 |
}
|
| 16 |
|
| 17 |
+
const response = await fetch("https://openrouter.ai/api/v1/chat/completions", {
|
| 18 |
method: 'POST',
|
| 19 |
headers: {
|
| 20 |
'Content-Type': 'application/json',
|
| 21 |
+
'Authorization': `Bearer ${apiKey.trim()}`,
|
| 22 |
+
'HTTP-Referer': 'https://protochat.app',
|
| 23 |
+
'X-Title': 'ProtoChat'
|
| 24 |
},
|
| 25 |
body: JSON.stringify({
|
| 26 |
+
model: model || "google/gemini-2.0-flash-lite-preview-02-05:free",
|
| 27 |
messages: [{ role: "user", content: prompt }],
|
| 28 |
+
temperature: 0.7
|
|
|
|
| 29 |
})
|
| 30 |
});
|
| 31 |
|
| 32 |
if (!response.ok) {
|
| 33 |
const errorText = await response.text();
|
| 34 |
+
return NextResponse.json({ error: `OpenRouter API responded with ${response.status}: ${errorText}` }, { status: response.status });
|
| 35 |
}
|
| 36 |
|
| 37 |
const data = await response.json();
|
src/lib/gemini-client.ts
CHANGED
|
@@ -1,12 +1,13 @@
|
|
| 1 |
|
| 2 |
/**
|
| 3 |
-
* @fileOverview المحرك الأساسي للاتصال بـ
|
| 4 |
*/
|
| 5 |
|
| 6 |
const MODELS = [
|
| 7 |
-
"gemini-2.0-flash-lite-preview-02-05",
|
| 8 |
-
"
|
| 9 |
-
"
|
|
|
|
| 10 |
];
|
| 11 |
|
| 12 |
function getRandomModel(): string {
|
|
@@ -14,29 +15,30 @@ function getRandomModel(): string {
|
|
| 14 |
}
|
| 15 |
|
| 16 |
/**
|
| 17 |
-
* دالة إرسال الطلب الأساسي للـ AI عبر
|
| 18 |
*/
|
| 19 |
export async function askAI(prompt: string): Promise<{ success: true, answer: string, model: string } | { success: false, message: string }> {
|
| 20 |
try {
|
| 21 |
-
const apiKey = process.env.
|
| 22 |
if (!apiKey) {
|
| 23 |
-
return { success: false, message: "API Key is missing." };
|
| 24 |
}
|
| 25 |
|
| 26 |
const selectedModel = getRandomModel();
|
| 27 |
|
| 28 |
-
const response = await fetch("https://
|
| 29 |
method: 'POST',
|
| 30 |
headers: {
|
| 31 |
'Content-Type': 'application/json',
|
| 32 |
-
'Authorization': `Bearer ${apiKey.trim()}`
|
|
|
|
|
|
|
| 33 |
},
|
| 34 |
body: JSON.stringify({
|
| 35 |
model: selectedModel,
|
| 36 |
messages: [{ role: "user", content: prompt }],
|
| 37 |
temperature: 0.7
|
| 38 |
}),
|
| 39 |
-
// Next.js specific to avoid caching in server actions
|
| 40 |
cache: 'no-store'
|
| 41 |
});
|
| 42 |
|
|
|
|
| 1 |
|
| 2 |
/**
|
| 3 |
+
* @fileOverview المحرك الأساسي للاتصال بـ OpenRouter باستخدام واجهة OpenAI المتوافقة.
|
| 4 |
*/
|
| 5 |
|
| 6 |
const MODELS = [
|
| 7 |
+
"google/gemini-2.0-flash-lite-preview-02-05:free",
|
| 8 |
+
"meta-llama/llama-3.1-8b-instruct:free",
|
| 9 |
+
"mistralai/mistral-7b-instruct:free",
|
| 10 |
+
"google/gemini-2.0-flash-lite-preview-02-05"
|
| 11 |
];
|
| 12 |
|
| 13 |
function getRandomModel(): string {
|
|
|
|
| 15 |
}
|
| 16 |
|
| 17 |
/**
|
| 18 |
+
* دالة إرسال الطلب الأساسي للـ AI عبر OpenRouter.
|
| 19 |
*/
|
| 20 |
export async function askAI(prompt: string): Promise<{ success: true, answer: string, model: string } | { success: false, message: string }> {
|
| 21 |
try {
|
| 22 |
+
const apiKey = process.env.OPENROUTER_API_KEY;
|
| 23 |
if (!apiKey) {
|
| 24 |
+
return { success: false, message: "OpenRouter API Key is missing." };
|
| 25 |
}
|
| 26 |
|
| 27 |
const selectedModel = getRandomModel();
|
| 28 |
|
| 29 |
+
const response = await fetch("https://openrouter.ai/api/v1/chat/completions", {
|
| 30 |
method: 'POST',
|
| 31 |
headers: {
|
| 32 |
'Content-Type': 'application/json',
|
| 33 |
+
'Authorization': `Bearer ${apiKey.trim()}`,
|
| 34 |
+
'HTTP-Referer': 'https://protochat.app', // Optional, for OpenRouter rankings
|
| 35 |
+
'X-Title': 'ProtoChat' // Optional
|
| 36 |
},
|
| 37 |
body: JSON.stringify({
|
| 38 |
model: selectedModel,
|
| 39 |
messages: [{ role: "user", content: prompt }],
|
| 40 |
temperature: 0.7
|
| 41 |
}),
|
|
|
|
| 42 |
cache: 'no-store'
|
| 43 |
});
|
| 44 |
|