looda3131 committed on
Commit
0ccd641
·
1 Parent(s): 61228d1

طيب استخدم هذا كخيار اضافي ايضا open router : sk-or-v1-0688df4786526b1cc

Browse files
src/ai/flows/ai-generate-post-ideas.ts CHANGED
@@ -33,12 +33,12 @@ export async function generatePostIdeas(
33
  const language = input.language || 'ar';
34
  const FIXED_COUNT = 30; // تثبيت العدد عند 30 منشوراً
35
 
36
- console.log(`[POST_GEN]: Initializing generation for ${FIXED_COUNT} posts... Engine: ${input.aiEngine || 'default'}`);
37
 
38
  const prompt = promptTemplate(FIXED_COUNT, location, language, input.age);
39
 
40
  try {
41
- const { output, model } = await safeGenerateContent(prompt, input.aiEngine);
42
 
43
  if (!output || !Array.isArray(output.posts)) {
44
  console.error("[POST_GEN_ERROR]: AI output missing 'posts' array. Model used:", model);
@@ -47,7 +47,6 @@ export async function generatePostIdeas(
47
 
48
  console.log(`[POST_GEN_SUCCESS]: Generated ${output.posts.length} posts successfully using: ${model}`);
49
 
50
- // ضمان بقاء العدد 30
51
  return {
52
  posts: output.posts.slice(0, FIXED_COUNT),
53
  modelUsed: model
 
33
  const language = input.language || 'ar';
34
  const FIXED_COUNT = 30; // تثبيت العدد عند 30 منشوراً
35
 
36
+ console.log(`[POST_GEN]: Initializing generation for ${FIXED_COUNT} posts... Preference: ${input.aiEngine || 'default'}`);
37
 
38
  const prompt = promptTemplate(FIXED_COUNT, location, language, input.age);
39
 
40
  try {
41
+ const { output, model } = await safeGenerateContent(prompt, input.aiEngine as any);
42
 
43
  if (!output || !Array.isArray(output.posts)) {
44
  console.error("[POST_GEN_ERROR]: AI output missing 'posts' array. Model used:", model);
 
47
 
48
  console.log(`[POST_GEN_SUCCESS]: Generated ${output.posts.length} posts successfully using: ${model}`);
49
 
 
50
  return {
51
  posts: output.posts.slice(0, FIXED_COUNT),
52
  modelUsed: model
src/ai/flows/ai-text-to-speech.ts CHANGED
@@ -1,7 +1,7 @@
1
 
2
  'use server';
3
  /**
4
- * @fileOverview تحويل النص إلى صوت باستخدام ElevenLabs مع المفتاح المحدث وسجلات تتبع دقيقة.
5
  */
6
 
7
  import type { TextToSpeechInput, TextToSpeechOutput } from './types';
 
1
 
2
  'use server';
3
  /**
4
+ * @fileOverview تحويل النص إلى صوت باستخدام ElevenLabs مع المفتاح المعتمد وسجلات تتبع دقيقة.
5
  */
6
 
7
  import type { TextToSpeechInput, TextToSpeechOutput } from './types';
src/components/header.tsx CHANGED
@@ -1,7 +1,7 @@
1
 
2
  'use client';
3
  import { ThemeToggle } from '@/components/theme-toggle';
4
- import { MessageSquare, Users, Mic, LogOut, User as UserIcon, Languages, Compass, Menu, Cpu, Sparkles } from 'lucide-react';
5
  import Link from 'next/link';
6
  import { Button } from './ui/button';
7
  import { useAuth } from '@/contexts/auth-context';
@@ -65,6 +65,18 @@ export function Header({ title }: HeaderProps) {
65
  );
66
  };
67
 
 
 
 
 
 
 
 
 
 
 
 
 
68
  return (
69
  <header className="fixed top-0 left-0 right-0 z-50 h-16 radiant-glass border-b">
70
  <div className="container mx-auto h-full flex items-center justify-between px-4">
@@ -99,9 +111,9 @@ export function Header({ title }: HeaderProps) {
99
  {/* AI Engine Selector */}
100
  <DropdownMenu>
101
  <DropdownMenuTrigger asChild>
102
- <Button variant="ghost" size="sm" className="rounded-2xl gap-2 hover:bg-purple-500/5 font-bold transition-all border border-purple-500/20">
103
- {aiEngine === 'primary' ? <Sparkles className="h-4 w-4 text-primary" /> : <Cpu className="h-4 w-4 text-purple-500" />}
104
- <span className="hidden xs:inline">{aiEngine === 'primary' ? 'Gemini' : 'Groq'}</span>
105
  </Button>
106
  </DropdownMenuTrigger>
107
  <DropdownMenuContent className="w-56 radiant-glass rounded-2xl p-2 border-none shadow-2xl">
@@ -111,6 +123,9 @@ export function Header({ title }: HeaderProps) {
111
  <DropdownMenuRadioItem value="primary" className="rounded-xl cursor-pointer p-3 font-bold">
112
  {t.primaryEngine}
113
  </DropdownMenuRadioItem>
 
 
 
114
  <DropdownMenuRadioItem value="fallback" className="rounded-xl cursor-pointer p-3 font-bold">
115
  {t.fallbackEngine}
116
  </DropdownMenuRadioItem>
 
1
 
2
  'use client';
3
  import { ThemeToggle } from '@/components/theme-toggle';
4
+ import { MessageSquare, Users, Mic, LogOut, User as UserIcon, Languages, Compass, Menu, Cpu, Sparkles, Zap } from 'lucide-react';
5
  import Link from 'next/link';
6
  import { Button } from './ui/button';
7
  import { useAuth } from '@/contexts/auth-context';
 
65
  );
66
  };
67
 
68
+ const getEngineIcon = () => {
69
+ if (aiEngine === 'primary') return <Sparkles className="h-4 w-4 text-primary" />;
70
+ if (aiEngine === 'advanced') return <Zap className="h-4 w-4 text-orange-500" />;
71
+ return <Cpu className="h-4 w-4 text-purple-500" />;
72
+ };
73
+
74
+ const getEngineName = () => {
75
+ if (aiEngine === 'primary') return 'Gemini';
76
+ if (aiEngine === 'advanced') return 'OpenRouter';
77
+ return 'Groq';
78
+ };
79
+
80
  return (
81
  <header className="fixed top-0 left-0 right-0 z-50 h-16 radiant-glass border-b">
82
  <div className="container mx-auto h-full flex items-center justify-between px-4">
 
111
  {/* AI Engine Selector */}
112
  <DropdownMenu>
113
  <DropdownMenuTrigger asChild>
114
+ <Button variant="ghost" size="sm" className="rounded-2xl gap-2 hover:bg-primary/5 font-bold transition-all border border-primary/20">
115
+ {getEngineIcon()}
116
+ <span className="hidden xs:inline">{getEngineName()}</span>
117
  </Button>
118
  </DropdownMenuTrigger>
119
  <DropdownMenuContent className="w-56 radiant-glass rounded-2xl p-2 border-none shadow-2xl">
 
123
  <DropdownMenuRadioItem value="primary" className="rounded-xl cursor-pointer p-3 font-bold">
124
  {t.primaryEngine}
125
  </DropdownMenuRadioItem>
126
+ <DropdownMenuRadioItem value="advanced" className="rounded-xl cursor-pointer p-3 font-bold">
127
+ {t.advancedEngine}
128
+ </DropdownMenuRadioItem>
129
  <DropdownMenuRadioItem value="fallback" className="rounded-xl cursor-pointer p-3 font-bold">
130
  {t.fallbackEngine}
131
  </DropdownMenuRadioItem>
src/contexts/language-context.tsx CHANGED
@@ -4,7 +4,7 @@
4
  import React, { createContext, useContext, useState, useEffect } from 'react';
5
  import { Language, translations } from '@/lib/translations';
6
 
7
- export type AIEngine = 'primary' | 'fallback';
8
 
9
  interface LanguageContextType {
10
  lang: Language;
@@ -26,6 +26,10 @@ export const LanguageProvider = ({ children }: { children: React.ReactNode }) =>
26
 
27
  const savedEngine = localStorage.getItem('proto-ai-engine') as AIEngine;
28
  if (savedEngine) setAiEngineState(savedEngine);
 
 
 
 
29
  }, []);
30
 
31
  const setLang = (newLang: Language) => {
 
4
  import React, { createContext, useContext, useState, useEffect } from 'react';
5
  import { Language, translations } from '@/lib/translations';
6
 
7
+ export type AIEngine = 'primary' | 'fallback' | 'advanced';
8
 
9
  interface LanguageContextType {
10
  lang: Language;
 
26
 
27
  const savedEngine = localStorage.getItem('proto-ai-engine') as AIEngine;
28
  if (savedEngine) setAiEngineState(savedEngine);
29
+
30
+ // ضبط اتجاه النص
31
+ document.documentElement.dir = (savedLang || 'ar') === 'ar' ? 'rtl' : 'ltr';
32
+ document.documentElement.lang = (savedLang || 'ar');
33
  }, []);
34
 
35
  const setLang = (newLang: Language) => {
src/lib/gemini-client.ts CHANGED
@@ -1,25 +1,21 @@
1
 
2
  /**
3
- * @fileOverview المحرك الرئيسي للذكاء الاصطناعي مع نظام Fallback ذكي (Gemini -> Groq) وسجلات تتبع دقيقة.
4
- * يدعم اختيار المستخدم للمحرك المفضل.
5
  */
6
 
7
  const GEMINI_KEY = "AIzaSyA_0i-0yCk9m6ehCIZ87_CKbUMrwlea-_s";
8
- const GEMINI_MODEL = "gemini-2.5-flash-lite";
9
 
10
  const GROQ_KEY = "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";
11
  const GROQ_MODELS = ["llama-3.3-70b-versatile", "mixtral-8x7b-32768"];
12
 
13
- /**
14
- * دالة استخراج JSON مطورة تتعامل مع النصوص الزائدة وعلامات Markdown
15
- */
16
  function extractJsonFromText(text: string): string {
17
  const jsonBlockMatch = text.match(/```json\s*([\s\S]*?)\s*```/);
18
  if (jsonBlockMatch) return jsonBlockMatch[1].trim();
19
 
20
- const genericBlockMatch = text.match(/```\s*([\s\S]*?)\s*```/);
21
- if (genericBlockMatch) return genericBlockMatch[1].trim();
22
-
23
  const firstBrace = text.indexOf('{');
24
  const lastBrace = text.lastIndexOf('}');
25
  if (firstBrace !== -1 && lastBrace !== -1 && lastBrace > firstBrace) {
@@ -28,37 +24,29 @@ function extractJsonFromText(text: string): string {
28
  return text.trim();
29
  }
30
 
31
- /**
32
- * المحرك الأساسي: Gemini 2.5 Flash Lite
33
- */
34
  async function callGemini(prompt: string): Promise<string> {
35
- console.log(`[AI_PROVIDER]: Attempting Gemini (${GEMINI_MODEL})...`);
36
  const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/${GEMINI_MODEL}:generateContent?key=${GEMINI_KEY}`, {
37
  method: 'POST',
38
  headers: { 'Content-Type': 'application/json' },
39
  body: JSON.stringify({
40
  contents: [{ parts: [{ text: prompt }] }],
41
- generationConfig: { maxOutputTokens: 4096, temperature: 0.7 }
42
  })
43
  });
44
 
45
  if (!response.ok) {
46
  const err = await response.text();
47
- console.error(`[AI_ERROR_GEMINI]: Status ${response.status} - Details: ${err}`);
48
- throw new Error(`Gemini Failed: ${response.status}`);
49
  }
50
 
51
  const data = await response.json();
52
- const text = data.candidates?.[0]?.content?.parts?.[0]?.text;
53
- if (!text) throw new Error("Gemini returned empty content.");
54
- return text;
55
  }
56
 
57
- /**
58
- * المحرك الاحتياطي: Groq
59
- */
60
  async function callGroq(prompt: string, model: string): Promise<string> {
61
- console.log(`[AI_PROVIDER]: Attempting Groq (${model})...`);
62
  const response = await fetch("https://api.groq.com/openai/v1/chat/completions", {
63
  method: 'POST',
64
  headers: {
@@ -74,63 +62,98 @@ async function callGroq(prompt: string, model: string): Promise<string> {
74
 
75
  if (!response.ok) {
76
  const err = await response.text();
77
- console.error(`[AI_ERROR_GROQ]: Status ${response.status} - Details: ${err}`);
78
- throw new Error(`Groq Failed: ${response.status}`);
79
  }
80
 
81
  const data = await response.json();
82
- const text = data.choices?.[0]?.message?.content;
83
- if (!text) throw new Error("Groq returned empty content.");
84
- return text;
85
  }
86
 
87
- /**
88
- * تنفيذ الطلب مع احترام تفضيلات المستخدم وسلسلة الـ Fallback
89
- */
90
- export async function askAI(prompt: string, preferredEngine: 'primary' | 'fallback' = 'primary'): Promise<{ success: true, answer: string, model: string }> {
91
- console.log(`[AI_CHAIN_START]: Preferred Engine: ${preferredEngine}`);
92
-
93
- const providers = preferredEngine === 'primary'
94
- ? [() => callGemini(prompt), () => callGroq(prompt, GROQ_MODELS[0]), () => callGroq(prompt, GROQ_MODELS[1])]
95
- : [() => callGroq(prompt, GROQ_MODELS[0]), () => callGroq(prompt, GROQ_MODELS[1]), () => callGemini(prompt)];
 
 
 
 
 
 
 
 
 
 
 
 
 
96
 
97
- const modelNames = preferredEngine === 'primary'
98
- ? [GEMINI_MODEL, GROQ_MODELS[0], GROQ_MODELS[1]]
99
- : [GROQ_MODELS[0], GROQ_MODELS[1], GEMINI_MODEL];
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
100
 
101
- for (let i = 0; i < providers.length; i++) {
102
  try {
103
- const answer = await providers[i]();
104
- console.log(`[AI_CHAIN_SUCCESS]: Used ${modelNames[i]}.`);
105
- return { success: true, answer, model: modelNames[i] };
 
 
106
  } catch (e: any) {
107
- console.warn(`[AI_CHAIN_STEP_FAILED]: Provider ${modelNames[i]} failed. Moving to next... Error: ${e.message}`);
108
  }
109
  }
110
 
111
- console.error(`[AI_CHAIN_CRITICAL]: All AI Providers Failed!`);
112
- throw new Error("All AI providers and models in fallback chain failed.");
113
  }
114
 
115
- export const safeGenerateContent = async (prompt: string, aiEngine: 'primary' | 'fallback' = 'primary'): Promise<{ output: any, model: string }> => {
116
- console.log("[AI_GEN_JSON]: Starting secure content generation (JSON Mode)...");
117
  const result = await askAI(prompt, aiEngine);
118
- const rawText = result.answer;
119
- const cleanedJsonText = extractJsonFromText(rawText);
120
 
121
  try {
122
  const parsed = JSON.parse(cleanedJsonText);
123
- console.log(`[AI_GEN_JSON_SUCCESS]: Parsed successfully via ${result.model}.`);
124
  return { output: parsed, model: result.model };
125
  } catch (error: any) {
126
- console.error("[AI_GEN_JSON_ERROR]: Invalid JSON structure received. Model used:", result.model);
127
- console.error("[RAW_OUTPUT_DEBUG]:", rawText);
128
- throw new Error(`AI response was not valid JSON: ${error.message}`);
129
  }
130
  };
131
 
132
- export const safeGenerateChat = async (prompt: string, aiEngine: 'primary' | 'fallback' = 'primary'): Promise<{ answer: string, model: string }> => {
133
- console.log("[AI_GEN_CHAT]: Starting chat generation (Text Mode)...");
134
  const result = await askAI(prompt, aiEngine);
135
  return { answer: result.answer, model: result.model };
136
  };
 
1
 
2
  /**
3
+ * @fileOverview المحرك الرئيسي للذكاء الاصطناعي مع دعم (Gemini, Groq, OpenRouter) ونظام تشخيص كامل.
 
4
  */
5
 
6
  const GEMINI_KEY = "AIzaSyA_0i-0yCk9m6ehCIZ87_CKbUMrwlea-_s";
7
+ const GEMINI_MODEL = "gemini-2.0-flash-lite-preview-02-05"; // استخدام أحدث طراز متاح حالياً
8
 
9
  const GROQ_KEY = "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";
10
  const GROQ_MODELS = ["llama-3.3-70b-versatile", "mixtral-8x7b-32768"];
11
 
12
+ const OPENROUTER_KEY = "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";
13
+ const OPENROUTER_MODELS = ["google/gemini-2.0-flash-lite-preview-02-05:free", "mistralai/mistral-7b-instruct:free"];
14
+
15
  function extractJsonFromText(text: string): string {
16
  const jsonBlockMatch = text.match(/```json\s*([\s\S]*?)\s*```/);
17
  if (jsonBlockMatch) return jsonBlockMatch[1].trim();
18
 
 
 
 
19
  const firstBrace = text.indexOf('{');
20
  const lastBrace = text.lastIndexOf('}');
21
  if (firstBrace !== -1 && lastBrace !== -1 && lastBrace > firstBrace) {
 
24
  return text.trim();
25
  }
26
 
 
 
 
27
  async function callGemini(prompt: string): Promise<string> {
28
+ console.log(`[AI_PROVIDER]: Connecting to Gemini (${GEMINI_MODEL})...`);
29
  const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/${GEMINI_MODEL}:generateContent?key=${GEMINI_KEY}`, {
30
  method: 'POST',
31
  headers: { 'Content-Type': 'application/json' },
32
  body: JSON.stringify({
33
  contents: [{ parts: [{ text: prompt }] }],
34
+ generationConfig: { temperature: 0.7 }
35
  })
36
  });
37
 
38
  if (!response.ok) {
39
  const err = await response.text();
40
+ console.error(`[AI_ERROR_GEMINI]: ${response.status} - ${err}`);
41
+ throw new Error(`Gemini Error: ${response.status}`);
42
  }
43
 
44
  const data = await response.json();
45
+ return data.candidates?.[0]?.content?.parts?.[0]?.text || "";
 
 
46
  }
47
 
 
 
 
48
  async function callGroq(prompt: string, model: string): Promise<string> {
49
+ console.log(`[AI_PROVIDER]: Connecting to Groq (${model})...`);
50
  const response = await fetch("https://api.groq.com/openai/v1/chat/completions", {
51
  method: 'POST',
52
  headers: {
 
62
 
63
  if (!response.ok) {
64
  const err = await response.text();
65
+ console.error(`[AI_ERROR_GROQ]: ${response.status} - ${err}`);
66
+ throw new Error(`Groq Error: ${response.status}`);
67
  }
68
 
69
  const data = await response.json();
70
+ return data.choices?.[0]?.message?.content || "";
 
 
71
  }
72
 
73
+ async function callOpenRouter(prompt: string, model: string): Promise<string> {
74
+ console.log(`[AI_PROVIDER]: Connecting to OpenRouter (${model})...`);
75
+ const response = await fetch("https://openrouter.ai/api/v1/chat/completions", {
76
+ method: 'POST',
77
+ headers: {
78
+ 'Authorization': `Bearer ${OPENROUTER_KEY}`,
79
+ 'Content-Type': 'application/json',
80
+ 'HTTP-Referer': 'https://zeina-chat.app',
81
+ 'X-Title': 'Zeina Chat'
82
+ },
83
+ body: JSON.stringify({
84
+ model: model,
85
+ messages: [{ role: "user", content: prompt }],
86
+ temperature: 0.7
87
+ })
88
+ });
89
+
90
+ if (!response.ok) {
91
+ const err = await response.text();
92
+ console.error(`[AI_ERROR_OPENROUTER]: ${response.status} - ${err}`);
93
+ throw new Error(`OpenRouter Error: ${response.status}`);
94
+ }
95
 
96
+ const data = await response.json();
97
+ return data.choices?.[0]?.message?.content || "";
98
+ }
99
+
100
+ export async function askAI(prompt: string, preferredEngine: 'primary' | 'fallback' | 'advanced' = 'primary'): Promise<{ success: true, answer: string, model: string }> {
101
+ console.log(`[AI_CHAIN]: Initializing chain for preference: ${preferredEngine}`);
102
+
103
+ let providers: { fn: () => Promise<string>, name: string }[] = [];
104
+
105
+ if (preferredEngine === 'primary') {
106
+ providers = [
107
+ { fn: () => callGemini(prompt), name: GEMINI_MODEL },
108
+ { fn: () => callOpenRouter(prompt, OPENROUTER_MODELS[0]), name: "OpenRouter-Lite" },
109
+ { fn: () => callGroq(prompt, GROQ_MODELS[0]), name: GROQ_MODELS[0] }
110
+ ];
111
+ } else if (preferredEngine === 'advanced') {
112
+ providers = [
113
+ { fn: () => callOpenRouter(prompt, OPENROUTER_MODELS[0]), name: "OpenRouter-Lite" },
114
+ { fn: () => callGemini(prompt), name: GEMINI_MODEL },
115
+ { fn: () => callGroq(prompt, GROQ_MODELS[0]), name: GROQ_MODELS[0] }
116
+ ];
117
+ } else {
118
+ providers = [
119
+ { fn: () => callGroq(prompt, GROQ_MODELS[0]), name: GROQ_MODELS[0] },
120
+ { fn: () => callGroq(prompt, GROQ_MODELS[1]), name: GROQ_MODELS[1] },
121
+ { fn: () => callOpenRouter(prompt, OPENROUTER_MODELS[0]), name: "OpenRouter-Lite" }
122
+ ];
123
+ }
124
 
125
+ for (const provider of providers) {
126
  try {
127
+ const answer = await provider.fn();
128
+ if (answer) {
129
+ console.log(`[AI_CHAIN_SUCCESS]: Resolved via ${provider.name}`);
130
+ return { success: true, answer, model: provider.name };
131
+ }
132
  } catch (e: any) {
133
+ console.warn(`[AI_CHAIN_RETRY]: ${provider.name} failed: ${e.message}`);
134
  }
135
  }
136
 
137
+ console.error(`[AI_CHAIN_FATAL]: All providers exhausted.`);
138
+ throw new Error("All AI providers failed.");
139
  }
140
 
141
+ export const safeGenerateContent = async (prompt: string, aiEngine: 'primary' | 'fallback' | 'advanced' = 'primary'): Promise<{ output: any, model: string }> => {
 
142
  const result = await askAI(prompt, aiEngine);
143
+ const cleanedJsonText = extractJsonFromText(result.answer);
 
144
 
145
  try {
146
  const parsed = JSON.parse(cleanedJsonText);
147
+ console.log(`[AI_JSON_OK]: Parsed response from ${result.model}`);
148
  return { output: parsed, model: result.model };
149
  } catch (error: any) {
150
+ console.error(`[AI_JSON_FAIL]: Model ${result.model} returned invalid JSON structure.`);
151
+ console.error(`[DEBUG_RAW]:`, result.answer);
152
+ throw new Error("AI response was not valid JSON.");
153
  }
154
  };
155
 
156
+ export const safeGenerateChat = async (prompt: string, aiEngine: 'primary' | 'fallback' | 'advanced' = 'primary'): Promise<{ answer: string, model: string }> => {
 
157
  const result = await askAI(prompt, aiEngine);
158
  return { answer: result.answer, model: result.model };
159
  };
src/lib/translations.ts CHANGED
@@ -70,6 +70,7 @@ export const translations = {
70
  aiEngine: 'محرك الذكاء الاصطناعي',
71
  primaryEngine: 'الأساسي (Gemini)',
72
  fallbackEngine: 'الاحتياطي (Groq)',
 
73
  },
74
  en: {
75
  appTitle: 'ZeinaChat',
@@ -139,5 +140,6 @@ export const translations = {
139
  aiEngine: 'AI Engine',
140
  primaryEngine: 'Primary (Gemini)',
141
  fallbackEngine: 'Fallback (Groq)',
 
142
  }
143
  };
 
70
  aiEngine: 'محرك الذكاء الاصطناعي',
71
  primaryEngine: 'الأساسي (Gemini)',
72
  fallbackEngine: 'الاحتياطي (Groq)',
73
+ advancedEngine: 'المتقدم (OpenRouter)',
74
  },
75
  en: {
76
  appTitle: 'ZeinaChat',
 
140
  aiEngine: 'AI Engine',
141
  primaryEngine: 'Primary (Gemini)',
142
  fallbackEngine: 'Fallback (Groq)',
143
+ advancedEngine: 'Advanced (OpenRouter)',
144
  }
145
  };