Chan-Y commited on
Commit
a94ab76
·
0 Parent(s):

Initial commit for HF Space

Browse files
.gitignore ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Python / Backend
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+ venv/
6
+ .env
7
+ .ipynb_checkpoints/
8
+
9
+ # Node / Frontend
10
+ node_modules/
11
+ dist/
12
+ .DS_Store
13
+ *.log
14
+ npm-debug.log*
15
+ yarn-debug.log*
16
+ yarn-error.log*
17
+ coverage/
18
+ .vscode/
19
+ .idea/
Dockerfile ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# ---- Stage 1: build the React (Vite) frontend ----
FROM node:20 AS frontend-builder
WORKDIR /app/frontend
# Copy only the manifests first so npm install stays cached across code edits.
COPY frontend/package*.json ./
RUN npm install
COPY frontend .
RUN npm run build

# ---- Stage 2: Python backend ----
FROM python:3.10
WORKDIR /app

# Install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy backend code
COPY backend ./backend
# WARNING: baking .env into the image embeds any secrets it contains into the
# published image layers. Prefer HF Space Secrets / runtime environment
# variables over this COPY.
COPY .env .

# Copy the built frontend into the "static" directory that backend.py serves
COPY --from=frontend-builder /app/frontend/dist ./static

# HF Spaces routes traffic to port 7860 (matches app_port in README.md)
EXPOSE 7860

# Run application
CMD ["python", "backend/backend.py"]
README.md ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Turkish AI Assistant
3
+ emoji: 🇹🇷
4
+ colorFrom: red
5
+ colorTo: white
6
+ sdk: docker
7
+ app_port: 7860
8
+ ---
9
+
10
+ # Turkish AI Assistant
11
+
12
+ This is an assistant capable of reasoning in Turkish, implemented either as a Retrieval-Augmented Generation (RAG) agent or as a fine-tuned model.
13
+
14
+ ## Deployment
15
+
16
+ This space is built using Docker. It:
17
+ 1. Builds the React frontend (Vite).
18
+ 2. Sets up a FastAPI backend.
19
+ 3. Serves the frontend statically from the backend.
backend/backend.py ADDED
@@ -0,0 +1,238 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI, HTTPException
2
+ from fastapi.middleware.cors import CORSMiddleware
3
+ from pydantic import BaseModel
4
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
5
+ from peft import PeftModel, PeftConfig
6
+ import gc
7
+ import torch
8
+ import os
9
+ from typing import Optional
10
+ from dotenv import load_dotenv
11
+
12
+ # Load environment variables
13
+ load_dotenv()
14
+
15
+ app = FastAPI()
16
+
17
+ # CORS ayarları - React uygulamanızın çalıştığı port'a izin verin
18
+ app.add_middleware(
19
+ CORSMiddleware,
20
+ allow_origins=["http://localhost:5173", "http://localhost:3000"], # Vite ve CRA portları
21
+ allow_credentials=True,
22
+ allow_methods=["*"],
23
+ allow_headers=["*"],
24
+ )
25
+
26
+ # Global variables
27
+ current_model = None
28
+ current_pipe = None
29
+ current_model_name = None
30
+
31
class GenerateRequest(BaseModel):
    """Request body for POST /generate: model choice plus sampling parameters."""
    model_name: str              # HF repo id of the model to run
    prompt: str                  # user message text
    system_prompt: str           # system instructions prepended to the chat
    max_tokens: int = 512        # maximum number of new tokens to generate
    temperature: float = 0.75    # sampling temperature
    top_p: float = 0.95          # nucleus-sampling threshold
    top_k: int = 64              # top-k sampling cutoff
    image: Optional[str] = None  # Base64 encoded image
40
+
41
class GenerateResponse(BaseModel):
    """Response body for POST /generate."""
    generated_text: str  # the assistant's generated reply
    model_used: str      # HF repo id that produced the reply
44
+
45
+ def load_model(model_path: str):
46
+ global current_model, current_pipe, current_model_name
47
+
48
+ # Return existing pipeline if the model is already loaded
49
+ if current_pipe is not None and current_model_name == model_path:
50
+ return current_pipe
51
+
52
+ print(f"Unloading previous model to load: {model_path}")
53
+
54
+ # Cleanup previous model
55
+ if current_model is not None:
56
+ del current_model
57
+ if current_pipe is not None:
58
+ del current_pipe
59
+
60
+ gc.collect()
61
+ torch.cuda.empty_cache()
62
+
63
+ try:
64
+ if "Gemma3-1B" in model_path:
65
+ print("Loading Gemma 3 1B with PEFT...")
66
+ base_model_name = "unsloth/gemma-3-1b-it"
67
+ tokenizer = AutoTokenizer.from_pretrained(base_model_name)
68
+ base_model = AutoModelForCausalLM.from_pretrained(
69
+ base_model_name,
70
+ device_map="auto",
71
+ dtype="auto"
72
+ )
73
+ model = PeftModel.from_pretrained(base_model, model_path)
74
+ pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
75
+ current_model = model
76
+
77
+ elif "Gemma3-12B" in model_path:
78
+ print("Loading Gemma 3 12B with PEFT (Image Support)...")
79
+ base_model_name = "unsloth/gemma-3-12b-it"
80
+ tokenizer = AutoTokenizer.from_pretrained(base_model_name)
81
+ base_model = AutoModelForCausalLM.from_pretrained(
82
+ base_model_name,
83
+ device_map="auto",
84
+ dtype="auto"
85
+ )
86
+ model = PeftModel.from_pretrained(base_model, model_path)
87
+ pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
88
+ current_model = model
89
+
90
+ elif "Qwen2.5-3B" in model_path:
91
+ print("Loading Qwen 2.5 3B...")
92
+ pipe = pipeline("text-generation", model=model_path, device=0)
93
+ current_model = pipe.model # Keep reference for simple consistency
94
+
95
+ elif "Llama3.1-8B" in model_path:
96
+ print("Loading Llama 3.1 8B...")
97
+ pipe = pipeline("text-generation", model=model_path, device=0)
98
+ current_model = pipe.model
99
+
100
+ else:
101
+ print(f"Unknown model pattern for {model_path}, trying default pipeline loading...")
102
+ pipe = pipeline("text-generation", model=model_path, device=0)
103
+ current_model = pipe.model
104
+
105
+ current_pipe = pipe
106
+ current_model_name = model_path
107
+ return pipe
108
+
109
+ except Exception as e:
110
+ print(f"Error loading model {model_path}: {str(e)}")
111
+ raise HTTPException(status_code=500, detail=f"Model loading failed: {str(e)}")
112
+
113
# Initialize with the default 1B model at import time so the first request
# does not pay the load latency. Failure is tolerated (e.g. no GPU / no
# network in a dev environment) — the model will be loaded lazily on the
# first /generate call instead.
default_model = "Chan-Y/TurkishReasoner-Gemma3-1B"
try:
    load_model(default_model)
except Exception as e:
    print(f"Initial model loading failed (might be expected in dev env): {e}")
119
+
120
@app.get("/")
def read_root():
    """Simple liveness endpoint for the API root."""
    status_payload = {"message": "Turkish AI Backend API is running"}
    return status_payload
123
+
124
@app.get("/models")
def get_models():
    """Return available models"""
    # Catalog kept as (display name, HF repo path, image support) triples;
    # the response shape is built from it in one pass.
    catalog = [
        ("Gemma 3 1B Turkish Reasoning", "Chan-Y/TurkishReasoner-Gemma3-1B", False),
        ("Gemma 3 12B Turkish (Supports Images)", "Chan-Y/TurkishReasoner-Gemma3-12B", True),
        ("Qwen 2.5 3B Turkish Reasoning", "Chan-Y/TurkishReasoner-Qwen2.5-3B", False),
        ("Llama 3.1 8B Turkish Reasoning", "Chan-Y/TurkishReasoner-Llama3.1-8B", False),
    ]
    return {
        "models": [
            {"name": name, "path": path, "supportsImages": supports_images}
            for name, path, supports_images in catalog
        ]
    }
151
+
152
@app.post("/generate", response_model=GenerateResponse)
async def generate_text(request: GenerateRequest):
    """Generate a chat completion with the requested model.

    Loads (or reuses) the pipeline for ``request.model_name``, builds a
    system+user chat, and returns the assistant's reply.

    Raises:
        HTTPException: 500 from load_model (preserved as-is) or on any
            generation failure.
    """
    try:
        # Load requested model if different from the resident one.
        pipe = load_model(request.model_name)

        # Prepare the user turn; the image entry (if any) goes first.
        user_content = [{"type": "text", "text": request.prompt}]

        # Add image if provided and the selected model supports it.
        if request.image and "Gemma3-12B" in request.model_name:
            # Assuming the image string is a data:image/jpeg;base64,... URI.
            # Standard transformers chat content entry for an image:
            #   {"type": "image", "image": base64_string_or_url}
            # NOTE(review): confirm the pipeline accepts base64 data URIs here.
            user_content.insert(0, {"type": "image", "image": request.image})

        messages = [
            {
                "role": "system",
                "content": [{"type": "text", "text": request.system_prompt}]
            },
            {
                "role": "user",
                "content": user_content
            },
        ]

        print(f"Generating with {request.model_name}, temp={request.temperature}")

        output = pipe(
            messages,
            max_new_tokens=request.max_tokens,
            temperature=request.temperature,
            top_p=request.top_p,
            top_k=request.top_k
        )

        # Chat pipelines return the whole conversation; the last entry is the
        # newly generated assistant turn.
        generated_text = output[0]["generated_text"][-1]["content"]

        return GenerateResponse(
            generated_text=generated_text,
            model_used=request.model_name
        )

    except HTTPException:
        # Preserve the status code and detail raised by load_model instead of
        # re-wrapping it (the broad handler below would flatten it to str(e)).
        raise
    except Exception as e:
        print(f"Error during generation: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
206
+
207
@app.post("/generate/stream")
async def generate_text_stream(request: GenerateRequest):
    """Placeholder for a streaming generation endpoint.

    Not implemented in this version — a real implementation would push
    tokens via Server-Sent Events. Always responds 501.
    """
    raise HTTPException(status_code=501, detail="Streaming not yet implemented")
214
+
215
# --- Static Files Serving (for Deployment) ---
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse

# Check if static directory exists (it will in the Docker image, where the
# built frontend is copied to ./static; in local dev it usually won't).
static_dir = "static"
if os.path.exists(static_dir):
    # Vite emits hashed bundles under dist/assets — mount them directly.
    app.mount("/assets", StaticFiles(directory=f"{static_dir}/assets"), name="assets")

    # Catch-all for SPA (serve index.html). Registered last, so the
    # specifically defined API routes above always match first.
    @app.get("/{full_path:path}")
    async def serve_spa(full_path: str):
        # Return 404 for API-looking paths instead of index.html, so a
        # mistyped API call fails loudly rather than receiving HTML.
        if full_path.startswith("api") or full_path.startswith("generate") or full_path.startswith("models"):
            raise HTTPException(status_code=404, detail="Not found")

        # Serve index.html for everything else; client-side routing takes over.
        return FileResponse(f"{static_dir}/index.html")
else:
    print("Static directory not found. Running in API-only mode.")

if __name__ == "__main__":
    import uvicorn
    # 0.0.0.0:7860 matches the Dockerfile EXPOSE and the HF Space app_port.
    uvicorn.run(app, host="0.0.0.0", port=7860)
frontend/App.tsx ADDED
@@ -0,0 +1,467 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { useState, useEffect, useRef } from 'react';
2
+ import { Sidebar } from './components/Sidebar';
3
+ import { MessageList } from './components/MessageList';
4
+ import { ChatInput } from './components/ChatInput';
5
+ import { Chat, Message, ModelConfig, GenerationParams, MODELS, DEFAULT_SYSTEM_PROMPT } from './types';
6
+ import { loadChats, saveChats, createNewChat, updateChatTitle } from './utils/chatStorage';
7
+ import { generateResponse } from './utils/api';
8
+ import { ChevronDown, Settings2 } from 'lucide-react';
9
+
10
// Top-level application component. Owns all chat state (persisted to
// localStorage via chatStorage), the selected model, system prompt and
// sampling parameters, and renders sidebar + header + chat area + input.
function App() {
  const [chats, setChats] = useState<Chat[]>([]);
  const [currentChatId, setCurrentChatId] = useState<string | null>(null);
  const [selectedModel, setSelectedModel] = useState<ModelConfig>(MODELS[0]); // Default to Gemma 1B
  const [systemPrompt, setSystemPrompt] = useState<string>(DEFAULT_SYSTEM_PROMPT);
  const [generationParams, setGenerationParams] = useState<GenerationParams>({
    temperature: 0.75,
    maxTokens: 1024,
    topP: 0.95,
    topK: 64
  });
  const [selectedImage, setSelectedImage] = useState<string | null>(null); // base64 image attached to next message
  const [isLoading, setIsLoading] = useState(false);
  const [loadingStatus, setLoadingStatus] = useState<string>('');
  const [isSidebarCollapsed, setIsSidebarCollapsed] = useState(false);
  const [showAdvancedSettings, setShowAdvancedSettings] = useState(false);
  const settingsDropdownRef = useRef<HTMLDivElement>(null);

  // Load chats from localStorage on mount
  useEffect(() => {
    const loadedChats = loadChats();
    setChats(loadedChats);
  }, []);

  // Close advanced settings dropdown when clicking outside
  useEffect(() => {
    const handleClickOutside = (event: MouseEvent) => {
      if (settingsDropdownRef.current && !settingsDropdownRef.current.contains(event.target as Node)) {
        setShowAdvancedSettings(false);
      }
    };

    if (showAdvancedSettings) {
      document.addEventListener('mousedown', handleClickOutside);
    }

    return () => {
      document.removeEventListener('mousedown', handleClickOutside);
    };
  }, [showAdvancedSettings]);

  // Get current chat
  const currentChat = chats.find(chat => chat.id === currentChatId);

  // Handle new chat
  const handleNewChat = () => {
    // If the current chat exists but has no messages yet, don't open another one
    if (currentChat && currentChat.messages.length === 0) {
      return;
    }

    const newChat = createNewChat(selectedModel.name);
    const updatedChats = [newChat, ...chats];
    setChats(updatedChats);
    setCurrentChatId(newChat.id);
    setSelectedImage(null);
    saveChats(updatedChats);
  };

  // Handle chat selection: switch chat and restore the model it was using
  const handleSelectChat = (chatId: string) => {
    setCurrentChatId(chatId);
    setSelectedImage(null);
    const chat = chats.find(c => c.id === chatId);
    if (chat) {
      const model = MODELS.find(m => m.name === chat.modelName);
      if (model) setSelectedModel(model);
    }
  };

  // Handle chat deletion
  const handleDeleteChat = (chatId: string) => {
    const updatedChats = chats.filter(chat => chat.id !== chatId);
    setChats(updatedChats);
    saveChats(updatedChats);
    if (currentChatId === chatId) {
      setCurrentChatId(null);
      setSelectedImage(null);
    }
  };

  // Handle model change; drops any pending image if the model can't use it
  const handleModelChange = (model: ModelConfig) => {
    setSelectedModel(model);
    if (!model.supportsImages) {
      setSelectedImage(null);
    }

    // Update current chat's model if exists
    if (currentChat) {
      const updatedChat = { ...currentChat, modelName: model.name };
      const updatedChats = chats.map(c => c.id === currentChat.id ? updatedChat : c);
      setChats(updatedChats);
      saveChats(updatedChats);
    }
  };

  // Handle sending a message: appends the user turn, shows a placeholder
  // assistant bubble, calls the backend, then fills the bubble in one update.
  const handleSendMessage = async (messageText: string) => {
    // 1. RESOLVE THE CHAT (new or existing?)
    let chatToUpdate = currentChat;
    let updatedChats = [...chats];

    if (!chatToUpdate) {
      const newChat = createNewChat(selectedModel.name);
      chatToUpdate = newChat;
      updatedChats = [newChat, ...chats];
      setChats(updatedChats);
      setCurrentChatId(newChat.id);
    }

    // 2. APPEND THE USER MESSAGE
    const userMessage: Message = {
      role: 'user',
      content: messageText,
      image: selectedImage || undefined
    };

    let updatedMessages = [...chatToUpdate.messages, userMessage];

    // 3. ADD AN EMPTY ASSISTANT BUBBLE (placeholder for the loading state)
    const assistantMessage: Message = {
      role: 'assistant',
      content: '' // empty for now; filled in once generation completes
    };
    updatedMessages = [...updatedMessages, assistantMessage];

    // Update the chat title (first 30 chars of the first message) and list
    chatToUpdate = {
      ...chatToUpdate,
      messages: updatedMessages,
      updatedAt: Date.now(),
      title: chatToUpdate.messages.length === 0 ? messageText.slice(0, 30) : chatToUpdate.title
    };

    updatedChats = updatedChats.map(c => c.id === chatToUpdate!.id ? chatToUpdate! : c);
    setChats(updatedChats);
    saveChats(updatedChats);

    // Clear the pending image
    setSelectedImage(null);

    // 4. GENERATE THE RESPONSE
    setIsLoading(true);
    setLoadingStatus('Yanıt hazırlanıyor...');

    // The streamed answer is accumulated here (not pushed to state yet)
    let finalGeneratedText = "";

    try {
      await generateResponse({
        model_name: selectedModel.path,
        prompt: messageText,
        system_prompt: systemPrompt,
        max_tokens: generationParams.maxTokens,
        temperature: generationParams.temperature,
        top_p: generationParams.topP,
        top_k: generationParams.topK,
        image: userMessage.image,

        // Streaming tokens are received but NOT rendered (no state updates)
        onToken: (streamedContent) => {
          finalGeneratedText = streamedContent;
        },
        onProgress: (progress) => {
          if (progress.message) setLoadingStatus(progress.message);
        }
      });

      // 5. DONE: update the UI once with the complete text
      setChats(currentChats => {
        return currentChats.map(chat => {
          if (chat.id === chatToUpdate!.id) {
            const newMessages = [...chat.messages];
            const lastMsgIndex = newMessages.length - 1;

            // Replace the last (empty assistant) message with the full text
            newMessages[lastMsgIndex] = {
              ...newMessages[lastMsgIndex],
              content: finalGeneratedText // rendered all at once
            };

            return { ...chat, messages: newMessages };
          }
          return chat;
        });
      });

      // Persist the final state.
      // NOTE: setChats is asynchronous; to be fully safe we could apply the
      // update to updatedChats manually and pass that to saveChats, but
      // saveChats runs on the next render anyway, so this is sufficient.

    } catch (error) {
      console.error('Error generating response:', error);

      // On error, replace the placeholder bubble with an error message
      setChats(currentChats => {
        const newChats = currentChats.map(chat => {
          if (chat.id === chatToUpdate!.id) {
            const newMessages = [...chat.messages];
            const lastMsgIndex = newMessages.length - 1;
            newMessages[lastMsgIndex] = {
              role: 'assistant',
              content: `❌ Hata: ${error instanceof Error ? error.message : 'Bir hata oluştu.'}`
            };
            return { ...chat, messages: newMessages };
          }
          return chat;
        });
        saveChats(newChats);
        return newChats;
      });
    } finally {
      setIsLoading(false);
      setLoadingStatus('');
    }
  };

  return (
    <div className="flex h-screen bg-gray-100 dark:bg-gray-950 transition-colors">
      {/* Sidebar */}
      <Sidebar
        chats={chats}
        currentChatId={currentChatId}
        onSelectChat={handleSelectChat}
        onNewChat={handleNewChat}
        onDeleteChat={handleDeleteChat}
        isCollapsed={isSidebarCollapsed}
        onToggleCollapse={() => setIsSidebarCollapsed(!isSidebarCollapsed)}
      />

      {/* Main Content */}
      <div className="flex-1 flex flex-col overflow-hidden">
        {/* Header with Model Selector and Settings */}
        <div className="bg-white dark:bg-gray-900 border-b border-gray-200 dark:border-gray-800 px-6 py-3 transition-colors">
          <div className="flex items-center justify-between">
            {/* Left: Model Selector */}
            <div className="relative">
              <select
                value={selectedModel.name}
                onChange={(e) => {
                  const model = MODELS.find(m => m.name === e.target.value);
                  if (model) handleModelChange(model);
                }}
                className="appearance-none px-3 py-1.5 pr-8 border border-gray-300 dark:border-gray-700 rounded-lg focus:ring-2 focus:ring-primary-500 dark:focus:ring-primary-600 focus:border-transparent bg-white dark:bg-gray-800 text-sm text-gray-900 dark:text-gray-100 transition-all cursor-pointer"
              >
                {MODELS.map((model) => (
                  <option key={model.path} value={model.name}>
                    {model.name}
                  </option>
                ))}
              </select>
              <ChevronDown
                className="absolute right-2 top-1/2 transform -translate-y-1/2 text-gray-400 dark:text-gray-500 pointer-events-none"
                size={16}
              />
            </div>

            {/* Center: Title (only when chat has messages) */}
            {currentChat?.messages.length ? (
              <div className="flex-1 text-center">
                <p className="text-sm text-gray-600 dark:text-gray-400 truncate px-4">
                  {currentChat.title}
                </p>
              </div>
            ) : (
              <div className="flex-1"></div>
            )}

            {/* Right: Advanced Settings */}
            <div className="relative" ref={settingsDropdownRef}>
              <button
                onClick={() => setShowAdvancedSettings(!showAdvancedSettings)}
                className="flex items-center gap-2 px-3 py-1.5 text-sm text-gray-700 dark:text-gray-300 hover:bg-gray-100 dark:hover:bg-gray-800 rounded-lg transition-colors"
              >
                <Settings2 size={18} />
                <span>Gelişmiş ayarlar</span>
              </button>

              {/* Advanced Settings Dropdown */}
              {showAdvancedSettings && (
                <div className="absolute right-0 top-full mt-2 w-96 bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg shadow-xl z-50 max-h-[70vh] overflow-y-auto">
                  <div className="p-4">
                    <div className="flex items-center justify-between mb-4">
                      <h3 className="font-semibold text-gray-900 dark:text-gray-100">Gelişmiş Ayarlar</h3>
                      <button
                        onClick={() => setShowAdvancedSettings(false)}
                        className="text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
                      >
                        {/* NOTE(review): button label appears lost in extraction — was likely a close glyph */}

                      </button>
                    </div>

                    {/* Settings Content Directly */}
                    <div className="space-y-4">
                      {/* System Prompt */}
                      <div>
                        <label className="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">
                          Sistem Prompt
                        </label>
                        <textarea
                          value={systemPrompt}
                          onChange={(e) => setSystemPrompt(e.target.value)}
                          rows={4}
                          className="w-full px-3 py-2 border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-800 text-gray-900 dark:text-gray-100 rounded-lg focus:ring-2 focus:ring-primary-500 dark:focus:ring-primary-600 focus:border-transparent resize-none transition-all text-sm"
                          placeholder="Sistem talimatlarını buraya girin..."
                        />
                        <button
                          onClick={() => setSystemPrompt(DEFAULT_SYSTEM_PROMPT)}
                          className="mt-2 flex items-center gap-1.5 text-xs text-primary-600 dark:text-primary-400 hover:text-primary-700 dark:hover:text-primary-300 transition-colors"
                        >
                          <span>🔄</span>
                          Varsayılana Dön
                        </button>
                      </div>

                      {/* Temperature */}
                      <div>
                        <div className="flex justify-between items-center mb-2">
                          <label className="text-sm font-medium text-gray-700 dark:text-gray-300">
                            Temperature
                          </label>
                          <span className="text-sm font-mono text-primary-600 dark:text-primary-400 bg-primary-50 dark:bg-primary-900/20 px-2 py-0.5 rounded">
                            {generationParams.temperature.toFixed(1)}
                          </span>
                        </div>
                        <input
                          type="range"
                          min="0"
                          max="1"
                          step="0.1"
                          value={generationParams.temperature}
                          onChange={(e) =>
                            setGenerationParams({ ...generationParams, temperature: parseFloat(e.target.value) })
                          }
                          className="w-full h-2 bg-gray-200 dark:bg-gray-700 rounded-lg appearance-none cursor-pointer accent-primary-600"
                        />
                        <p className="text-xs text-gray-500 dark:text-gray-400 mt-1.5">
                          Düşük: daha tutarlı • Yüksek: daha yaratıcı
                        </p>
                      </div>

                      {/* Max Tokens */}
                      <div>
                        <div className="flex justify-between items-center mb-2">
                          <label className="text-sm font-medium text-gray-700 dark:text-gray-300">
                            Max New Tokens
                          </label>
                          <span className="text-sm font-mono text-primary-600 dark:text-primary-400 bg-primary-50 dark:bg-primary-900/20 px-2 py-0.5 rounded">
                            {generationParams.maxTokens}
                          </span>
                        </div>
                        <input
                          type="range"
                          min="64"
                          max="4096"
                          step="64"
                          value={generationParams.maxTokens}
                          onChange={(e) =>
                            setGenerationParams({ ...generationParams, maxTokens: parseInt(e.target.value) })
                          }
                          className="w-full h-2 bg-gray-200 dark:bg-gray-700 rounded-lg appearance-none cursor-pointer accent-primary-600"
                        />
                      </div>

                      {/* Top-p */}
                      <div>
                        <div className="flex justify-between items-center mb-2">
                          <label className="text-sm font-medium text-gray-700 dark:text-gray-300">
                            Top-p (Nucleus Sampling)
                          </label>
                          <span className="text-sm font-mono text-primary-600 dark:text-primary-400 bg-primary-50 dark:bg-primary-900/20 px-2 py-0.5 rounded">
                            {generationParams.topP.toFixed(2)}
                          </span>
                        </div>
                        <input
                          type="range"
                          min="0.1"
                          max="1"
                          step="0.05"
                          value={generationParams.topP}
                          onChange={(e) =>
                            setGenerationParams({ ...generationParams, topP: parseFloat(e.target.value) })
                          }
                          className="w-full h-2 bg-gray-200 dark:bg-gray-700 rounded-lg appearance-none cursor-pointer accent-primary-600"
                        />
                      </div>

                      {/* Top-k */}
                      <div>
                        <div className="flex justify-between items-center mb-2">
                          <label className="text-sm font-medium text-gray-700 dark:text-gray-300">
                            Top-k
                          </label>
                          <span className="text-sm font-mono text-primary-600 dark:text-primary-400 bg-primary-50 dark:bg-primary-900/20 px-2 py-0.5 rounded">
                            {generationParams.topK}
                          </span>
                        </div>
                        <input
                          type="range"
                          min="1"
                          max="100"
                          step="1"
                          value={generationParams.topK}
                          onChange={(e) =>
                            setGenerationParams({ ...generationParams, topK: parseInt(e.target.value) })
                          }
                          className="w-full h-2 bg-gray-200 dark:bg-gray-700 rounded-lg appearance-none cursor-pointer accent-primary-600"
                        />
                      </div>
                    </div>
                  </div>
                </div>
              )}
            </div>
          </div>
        </div>

        {/* Chat Area or Welcome Screen */}
        {currentChat?.messages.length ? (
          <MessageList messages={currentChat.messages} />
        ) : (
          <div className="flex-1 flex flex-col items-center justify-center bg-gray-50 dark:bg-gray-950 px-4">
            <div className="w-full max-w-3xl mx-auto text-center space-y-8">
              {/* Main Welcome Text */}
              <h1 className="text-4xl md:text-5xl lg:text-6xl font-medium text-gray-800 dark:text-gray-100">
                Sen hazır olduğunda hazırım.
              </h1>
            </div>
          </div>
        )}

        {/* Chat Input */}
        <div className="bg-gray-50 dark:bg-gray-950 px-4 py-6">
          {/* Loading Status */}
          {loadingStatus && (
            <div className="w-full max-w-4xl mx-auto mb-3 px-4 py-2 bg-blue-50 dark:bg-blue-900/20 border border-blue-200 dark:border-blue-800 rounded-lg">
              <p className="text-sm text-blue-700 dark:text-blue-300 text-center">
                {loadingStatus}
              </p>
            </div>
          )}
          <ChatInput
            onSend={handleSendMessage}
            isLoading={isLoading}
            onImageSelect={setSelectedImage}
            currentImage={selectedImage}
            supportsImages={selectedModel.supportsImages}
          />
        </div>
      </div>
    </div>
  );
}
466
+
467
+ export default App;
frontend/assets/react.svg ADDED
frontend/components/AdvancedSettings.tsx ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React, { useState } from 'react';
2
+ import { GenerationParams, DEFAULT_SYSTEM_PROMPT } from '../types';
3
+ import { ChevronDown, ChevronUp, Settings, RotateCcw } from 'lucide-react';
4
+
5
// Props for the AdvancedSettings panel. Fully controlled: values come from
// the parent; edits are reported back via the two change callbacks.
interface AdvancedSettingsProps {
  params: GenerationParams;
  systemPrompt: string;
  onParamsChange: (params: GenerationParams) => void;
  onSystemPromptChange: (prompt: string) => void;
}

// Collapsible panel exposing the system prompt and sampling parameters
// (temperature, max new tokens, top-p, top-k). Only the open/closed toggle
// is local state.
export const AdvancedSettings: React.FC<AdvancedSettingsProps> = ({
  params,
  systemPrompt,
  onParamsChange,
  onSystemPromptChange
}) => {
  const [isOpen, setIsOpen] = useState(false);

  return (
    <div className="border border-gray-200 dark:border-gray-700 rounded-lg overflow-hidden bg-white dark:bg-gray-800 transition-colors">
      <button
        onClick={() => setIsOpen(!isOpen)}
        className="w-full flex items-center justify-between px-4 py-3 text-left hover:bg-gray-50 dark:hover:bg-gray-700/50 transition-colors"
      >
        <div className="flex items-center gap-2">
          <Settings size={18} className="text-gray-500 dark:text-gray-400" />
          <span className="font-medium text-gray-700 dark:text-gray-300">Gelişmiş Ayarlar</span>
        </div>
        {isOpen ? (
          <ChevronUp size={20} className="text-gray-400 dark:text-gray-500" />
        ) : (
          <ChevronDown size={20} className="text-gray-400 dark:text-gray-500" />
        )}
      </button>

      {isOpen && (
        <div className="px-4 pb-4 space-y-4 border-t border-gray-200 dark:border-gray-700 pt-4 bg-gray-50 dark:bg-gray-800/50">
          {/* System Prompt */}
          <div>
            <label className="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">
              Sistem Prompt
            </label>
            <textarea
              value={systemPrompt}
              onChange={(e) => onSystemPromptChange(e.target.value)}
              rows={4}
              className="w-full px-3 py-2 border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-800 text-gray-900 dark:text-gray-100 rounded-lg focus:ring-2 focus:ring-primary-500 dark:focus:ring-primary-600 focus:border-transparent resize-none transition-all text-sm"
              placeholder="Sistem talimatlarını buraya girin..."
            />
            <button
              onClick={() => onSystemPromptChange(DEFAULT_SYSTEM_PROMPT)}
              className="mt-2 flex items-center gap-1.5 text-xs text-primary-600 dark:text-primary-400 hover:text-primary-700 dark:hover:text-primary-300 transition-colors"
            >
              <RotateCcw size={12} />
              Varsayılana Dön
            </button>
          </div>

          {/* Temperature */}
          <div>
            <div className="flex justify-between items-center mb-2">
              <label className="text-sm font-medium text-gray-700 dark:text-gray-300">
                Temperature
              </label>
              <span className="text-sm font-mono text-primary-600 dark:text-primary-400 bg-primary-50 dark:bg-primary-900/20 px-2 py-0.5 rounded">
                {params.temperature.toFixed(1)}
              </span>
            </div>
            <input
              type="range"
              min="0"
              max="1"
              step="0.1"
              value={params.temperature}
              onChange={(e) =>
                onParamsChange({ ...params, temperature: parseFloat(e.target.value) })
              }
              className="w-full h-2 bg-gray-200 dark:bg-gray-700 rounded-lg appearance-none cursor-pointer accent-primary-600"
            />
            <p className="text-xs text-gray-500 dark:text-gray-400 mt-1.5">
              Düşük: daha tutarlı • Yüksek: daha yaratıcı
            </p>
          </div>

          {/* Max Tokens */}
          <div>
            <div className="flex justify-between items-center mb-2">
              <label className="text-sm font-medium text-gray-700 dark:text-gray-300">
                Max New Tokens
              </label>
              <span className="text-sm font-mono text-primary-600 dark:text-primary-400 bg-primary-50 dark:bg-primary-900/20 px-2 py-0.5 rounded">
                {params.maxTokens}
              </span>
            </div>
            <input
              type="range"
              min="64"
              max="4096"
              step="64"
              value={params.maxTokens}
              onChange={(e) =>
                onParamsChange({ ...params, maxTokens: parseInt(e.target.value) })
              }
              className="w-full h-2 bg-gray-200 dark:bg-gray-700 rounded-lg appearance-none cursor-pointer accent-primary-600"
            />
          </div>

          {/* Top-p */}
          <div>
            <div className="flex justify-between items-center mb-2">
              <label className="text-sm font-medium text-gray-700 dark:text-gray-300">
                Top-p (Nucleus Sampling)
              </label>
              <span className="text-sm font-mono text-primary-600 dark:text-primary-400 bg-primary-50 dark:bg-primary-900/20 px-2 py-0.5 rounded">
                {params.topP.toFixed(2)}
              </span>
            </div>
            <input
              type="range"
              min="0.1"
              max="1"
              step="0.05"
              value={params.topP}
              onChange={(e) =>
                onParamsChange({ ...params, topP: parseFloat(e.target.value) })
              }
              className="w-full h-2 bg-gray-200 dark:bg-gray-700 rounded-lg appearance-none cursor-pointer accent-primary-600"
            />
          </div>

          {/* Top-k */}
          <div>
            <div className="flex justify-between items-center mb-2">
              <label className="text-sm font-medium text-gray-700 dark:text-gray-300">
                Top-k
              </label>
              <span className="text-sm font-mono text-primary-600 dark:text-primary-400 bg-primary-50 dark:bg-primary-900/20 px-2 py-0.5 rounded">
                {params.topK}
              </span>
            </div>
            <input
              type="range"
              min="1"
              max="100"
              step="1"
              value={params.topK}
              onChange={(e) =>
                onParamsChange({ ...params, topK: parseInt(e.target.value) })
              }
              className="w-full h-2 bg-gray-200 dark:bg-gray-700 rounded-lg appearance-none cursor-pointer accent-primary-600"
            />
          </div>
        </div>
      )}
    </div>
  );
};
frontend/components/ChatInput.tsx ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import React, { useState, KeyboardEvent, useRef } from 'react';
import { Send, Loader2, Plus, X } from 'lucide-react';

/**
 * Props for the chat input bar.
 *
 * - `onSend` receives the trimmed message text when the user submits.
 * - `onImageSelect` / `currentImage` let the parent own the attached image
 *   (base64 data URL) while this component renders the local preview.
 * - `supportsImages` gates the upload button for text-only models.
 */
interface ChatInputProps {
  onSend: (message: string) => void;
  disabled?: boolean;
  isLoading?: boolean;
  onImageSelect?: (image: string | null) => void;
  currentImage?: string | null;
  supportsImages?: boolean;
}

/**
 * Bottom chat input: auto-growing textarea, optional image attachment,
 * and a send button. Enter submits; Shift+Enter inserts a newline.
 */
export const ChatInput: React.FC<ChatInputProps> = ({
  onSend,
  disabled = false,
  isLoading = false,
  onImageSelect,
  currentImage,
  supportsImages = false
}) => {
  const [input, setInput] = useState('');
  const [preview, setPreview] = useState<string | null>(currentImage || null);
  const fileInputRef = useRef<HTMLInputElement>(null);

  const handleSend = () => {
    if (input.trim() && !disabled && !isLoading) {
      onSend(input.trim());
      setInput('');
    }
  };

  // FIX: use onKeyDown instead of the deprecated React `onKeyPress`
  // (keypress is legacy and not fired consistently across browsers).
  // Also skip sending while an IME composition is in progress so that
  // confirming a composition with Enter does not submit the message.
  const handleKeyDown = (e: KeyboardEvent<HTMLTextAreaElement>) => {
    if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
      e.preventDefault();
      handleSend();
    }
  };

  // Read the chosen file as a base64 data URL and hand it to the parent.
  const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (file) {
      const reader = new FileReader();
      reader.onloadend = () => {
        const result = reader.result as string;
        setPreview(result);
        onImageSelect?.(result);
      };
      reader.readAsDataURL(file);
    }
  };

  const handleRemoveImage = () => {
    setPreview(null);
    onImageSelect?.(null);
    // Reset the input so selecting the same file again re-fires onChange.
    if (fileInputRef.current) {
      fileInputRef.current.value = '';
    }
  };

  // Keep the local preview in sync when the parent replaces/clears the image.
  React.useEffect(() => {
    setPreview(currentImage || null);
  }, [currentImage]);

  return (
    <div className="w-full max-w-4xl mx-auto">
      {/* Image Preview */}
      {preview && (
        <div className="mb-3 relative inline-block">
          <img
            src={preview}
            alt="Preview"
            className="max-w-xs h-32 object-contain rounded-lg border border-gray-300 dark:border-gray-700"
          />
          <button
            onClick={handleRemoveImage}
            className="absolute -top-2 -right-2 p-1 bg-red-500 hover:bg-red-600 text-white rounded-full transition-all shadow-lg"
          >
            <X size={14} />
          </button>
        </div>
      )}

      <div className="relative flex items-center gap-2 bg-white dark:bg-gray-800 rounded-3xl shadow-lg border border-gray-200 dark:border-gray-700 px-3 py-3 transition-all focus-within:shadow-xl focus-within:border-primary-500 dark:focus-within:border-primary-600">
        {/* Hidden File Input */}
        <input
          ref={fileInputRef}
          type="file"
          accept="image/*"
          onChange={handleFileChange}
          className="hidden"
          disabled={!supportsImages}
        />

        {/* Plus Button for Image Upload */}
        <button
          onClick={() => fileInputRef.current?.click()}
          disabled={!supportsImages}
          className={`flex-shrink-0 w-8 h-8 rounded-lg flex items-center justify-center transition-all ${
            supportsImages
              ? 'hover:bg-gray-100 dark:hover:bg-gray-700 text-gray-600 dark:text-gray-400'
              : 'text-gray-300 dark:text-gray-600 cursor-not-allowed'
          }`}
          title={supportsImages ? "Resim yükle" : "Bu model resim desteklemiyor"}
        >
          <Plus size={20} />
        </button>

        <textarea
          value={input}
          onChange={(e) => setInput(e.target.value)}
          onKeyDown={handleKeyDown}
          placeholder="Herhangi bir şey sor"
          rows={1}
          disabled={disabled || isLoading}
          className="flex-1 bg-transparent text-gray-900 dark:text-gray-100 resize-none border-none focus:outline-none focus:ring-0 placeholder:text-gray-400 dark:placeholder:text-gray-500 disabled:cursor-not-allowed"
          style={{ minHeight: '24px', maxHeight: '120px' }}
          onInput={(e) => {
            // Auto-grow the textarea up to 120px, then scroll.
            const target = e.target as HTMLTextAreaElement;
            target.style.height = 'auto';
            target.style.height = Math.min(target.scrollHeight, 120) + 'px';
          }}
        />
        <button
          onClick={handleSend}
          disabled={!input.trim() || disabled || isLoading}
          className="flex-shrink-0 w-8 h-8 bg-gray-200 hover:bg-gray-300 dark:bg-gray-700 dark:hover:bg-gray-600 disabled:bg-gray-100 dark:disabled:bg-gray-800 disabled:cursor-not-allowed rounded-lg flex items-center justify-center transition-all"
        >
          {isLoading ? (
            <Loader2 size={18} className="animate-spin text-gray-600 dark:text-gray-400" />
          ) : (
            <Send size={18} className="text-gray-600 dark:text-gray-300" />
          )}
        </button>
      </div>

    </div>
  );
};
frontend/components/ImageUpload.tsx ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import React, { useRef, useState } from 'react';
import { Upload, X, Image as ImageIcon } from 'lucide-react';

/**
 * Props for the standalone image-upload widget.
 * The parent owns the selected image (base64 data URL) via `currentImage`
 * and is notified of changes through `onImageSelect`.
 */
interface ImageUploadProps {
  onImageSelect: (image: string | null) => void;
  currentImage: string | null;
  disabled?: boolean;
}

/**
 * Image picker with inline preview. When disabled (model has no vision
 * support) the drop area is rendered inert with an explanatory hint.
 */
export const ImageUpload: React.FC<ImageUploadProps> = ({
  onImageSelect,
  currentImage,
  disabled = false
}) => {
  const fileInputRef = useRef<HTMLInputElement>(null);
  const [preview, setPreview] = useState<string | null>(currentImage);

  // Read the picked file as a data URL, then publish it upward.
  const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>) => {
    const picked = event.target.files?.[0];
    if (!picked) {
      return;
    }
    const reader = new FileReader();
    reader.onloadend = () => {
      const dataUrl = reader.result as string;
      setPreview(dataUrl);
      onImageSelect(dataUrl);
    };
    reader.readAsDataURL(picked);
  };

  // Clear both local preview and parent state; reset the <input> so the
  // same file can be re-selected later.
  const handleRemove = () => {
    setPreview(null);
    onImageSelect(null);
    const node = fileInputRef.current;
    if (node) {
      node.value = '';
    }
  };

  // Forward clicks on the drop area to the hidden file input.
  const handleClick = () => {
    if (disabled) {
      return;
    }
    fileInputRef.current?.click();
  };

  const areaClass = disabled
    ? 'border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900 cursor-not-allowed'
    : 'border-gray-300 dark:border-gray-700 hover:border-primary-400 dark:hover:border-primary-600 hover:bg-primary-50 dark:hover:bg-primary-900/10 cursor-pointer';

  return (
    <div>
      <label className="flex items-center gap-2 text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">
        <ImageIcon size={16} />
        Resim Yükle
        {disabled && <span className="text-gray-400 dark:text-gray-600 text-xs font-normal">(Sadece Gemma 3 12B)</span>}
      </label>

      <input
        ref={fileInputRef}
        type="file"
        accept="image/*"
        onChange={handleFileChange}
        className="hidden"
        disabled={disabled}
      />

      {preview ? (
        <div className="relative inline-block group">
          <img
            src={preview}
            alt="Preview"
            className="max-w-full h-40 object-contain rounded-lg border-2 border-gray-200 dark:border-gray-700"
          />
          <button
            onClick={handleRemove}
            className="absolute top-2 right-2 p-1.5 bg-red-500 hover:bg-red-600 text-white rounded-lg transition-all shadow-lg opacity-0 group-hover:opacity-100"
          >
            <X size={16} />
          </button>
        </div>
      ) : (
        <button
          onClick={handleClick}
          disabled={disabled}
          className={`w-full border-2 border-dashed rounded-lg p-6 text-center transition-all ${areaClass}`}
        >
          <Upload
            className={`mx-auto mb-2 ${disabled ? 'text-gray-300 dark:text-gray-700' : 'text-gray-400 dark:text-gray-500'}`}
            size={28}
          />
          <p className={`text-sm ${disabled ? 'text-gray-400 dark:text-gray-600' : 'text-gray-600 dark:text-gray-400'}`}>
            {disabled ? 'Resim yüklemek için görsel destekli model seçin' : 'Resim yüklemek için tıklayın'}
          </p>
          {!disabled && (
            <p className="text-xs text-gray-500 dark:text-gray-500 mt-1">PNG, JPG, GIF</p>
          )}
        </button>
      )}
    </div>
  );
};
frontend/components/MessageList.tsx ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import React from 'react';
import { Message } from '../types';
import { User, Bot, Sparkles } from 'lucide-react';

interface MessageListProps {
  messages: Message[];
}

/**
 * Scrollable transcript of the conversation. Assistant messages are parsed
 * for the reasoning markers (<start_working_out>, <SOLUTION>, ...) emitted
 * by the model and rendered with section headers; system messages are hidden.
 */
export const MessageList: React.FC<MessageListProps> = ({ messages }) => {
  // Split assistant output on the reasoning tags and render each segment.
  const formatMessage = (content: string) => {
    const segments = content.split(/(<start_working_out>|<end_working_out>|<SOLUTION>|<\/SOLUTION>)/g);

    return segments.map((segment, idx) => {
      switch (segment) {
        case '<start_working_out>':
          // Header for the model's chain-of-thought section.
          return (
            <div key={idx} className="inline-flex items-center gap-2 text-primary-600 dark:text-primary-400 font-semibold mt-3 mb-2">
              <Sparkles size={16} className="flex-shrink-0" />
              <span className="leading-none">Düşünme Süreci</span>
            </div>
          );
        case '<SOLUTION>':
          // Header for the final answer section.
          return (
            <div key={idx} className="inline-flex items-center gap-2 text-green-600 dark:text-green-400 font-semibold mt-4 mb-2">
              <svg className="w-4 h-4 flex-shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" />
              </svg>
              <span className="leading-none">Çözüm</span>
            </div>
          );
        case '<end_working_out>':
        case '</SOLUTION>':
          // Closing tags render nothing.
          return null;
        default:
          // Plain text between tags; skip whitespace-only segments.
          return segment.trim()
            ? <div key={idx} className="whitespace-pre-wrap leading-relaxed">{segment.trim()}</div>
            : null;
      }
    });
  };

  // Empty-state placeholder before the first exchange.
  if (messages.length === 0) {
    return (
      <div className="flex-1 flex items-center justify-center text-gray-400 dark:text-gray-600 bg-gray-50 dark:bg-gray-900">
        <div className="text-center max-w-md px-4">
          <div className="w-20 h-20 bg-gradient-to-br from-primary-100 to-primary-200 dark:from-primary-900/30 dark:to-primary-800/30 rounded-2xl flex items-center justify-center mx-auto mb-4">
            <Bot size={40} className="text-primary-600 dark:text-primary-400" />
          </div>
          <h3 className="text-xl font-semibold text-gray-700 dark:text-gray-300 mb-2">
            Sohbete Başlayın
          </h3>
          <p className="text-sm text-gray-500 dark:text-gray-400">
            Türkçe AI modeliyle muhakeme sorularınızı sorun ve detaylı açıklamalar alın
          </p>
        </div>
      </div>
    );
  }

  return (
    <div className="flex-1 overflow-y-auto px-4 py-6 space-y-6 bg-white dark:bg-gray-950">
      <div className="max-w-3xl mx-auto space-y-6">
        {messages.map((message, idx) => {
          // System prompts are internal — never shown in the transcript.
          if (message.role === 'system') return null;

          const isAssistant = message.role === 'assistant';

          return (
            <div
              key={idx}
              className="flex gap-4 group"
            >
              {/* Avatar */}
              <div className="flex-shrink-0">
                {isAssistant ? (
                  <div className="w-7 h-7 bg-primary-600 rounded-sm flex items-center justify-center">
                    <Bot size={16} className="text-white" />
                  </div>
                ) : (
                  <div className="w-7 h-7 bg-gray-700 dark:bg-gray-600 rounded-sm flex items-center justify-center">
                    <User size={16} className="text-white" />
                  </div>
                )}
              </div>

              {/* Message Content */}
              <div className="flex-1 min-w-0 pt-1">
                {message.image && (
                  <img
                    src={message.image}
                    alt="User upload"
                    className="max-w-sm rounded-lg mb-3 border border-gray-200 dark:border-gray-700"
                  />
                )}
                <div className="text-sm text-gray-800 dark:text-gray-200 leading-relaxed">
                  {isAssistant ? (
                    formatMessage(message.content)
                  ) : (
                    <div className="whitespace-pre-wrap">{message.content}</div>
                  )}
                </div>
              </div>
            </div>
          );
        })}
      </div>
    </div>
  );
};
frontend/components/ModelSelector.tsx ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import React from 'react';
import { MODELS, ModelConfig } from '../types';
import { ChevronDown, Cpu } from 'lucide-react';

interface ModelSelectorProps {
  selectedModel: ModelConfig;
  onModelChange: (model: ModelConfig) => void;
}

/**
 * Dropdown for choosing the inference model. The <select> is keyed by the
 * human-readable model name; the full ModelConfig is looked up in MODELS
 * before being handed to the parent. Shows a badge when the selected
 * model supports image input.
 */
export const ModelSelector: React.FC<ModelSelectorProps> = ({
  selectedModel,
  onModelChange
}) => {
  // Resolve the selected display name back to its ModelConfig entry.
  const handleChange = (event: React.ChangeEvent<HTMLSelectElement>) => {
    const chosen = MODELS.find((candidate) => candidate.name === event.target.value);
    if (chosen) {
      onModelChange(chosen);
    }
  };

  return (
    <div className="relative">
      <label className="flex items-center gap-2 text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">
        <Cpu size={16} />
        Model Seçimi
      </label>
      <div className="relative">
        <select
          value={selectedModel.name}
          onChange={handleChange}
          className="w-full appearance-none px-4 py-2.5 pr-10 border border-gray-300 dark:border-gray-700 rounded-lg focus:ring-2 focus:ring-primary-500 dark:focus:ring-primary-600 focus:border-transparent bg-white dark:bg-gray-800 text-gray-900 dark:text-gray-100 transition-all"
        >
          {MODELS.map((model) => (
            <option key={model.path} value={model.name}>
              {model.name}
            </option>
          ))}
        </select>
        {/* Decorative chevron over the native select arrow. */}
        <ChevronDown
          className="absolute right-3 top-1/2 transform -translate-y-1/2 text-gray-400 dark:text-gray-500 pointer-events-none"
          size={20}
        />
      </div>
      {selectedModel.supportsImages && (
        <div className="mt-2 flex items-center gap-1.5 text-xs text-primary-600 dark:text-primary-400 bg-primary-50 dark:bg-primary-900/20 px-2.5 py-1.5 rounded-md w-fit">
          <svg className="w-3.5 h-3.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
          </svg>
          <span>Resim desteği aktif</span>
        </div>
      )}
    </div>
  );
};
frontend/components/Sidebar.tsx ADDED
@@ -0,0 +1,193 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import React, { useState } from 'react';
import { Chat } from '../types';
import { MessageSquarePlus, MessageSquare, Trash2, Moon, Sun, PanelLeftClose, PanelLeft, Search } from 'lucide-react';
import { useTheme } from '../contexts/ThemeContext';

/**
 * Props for the collapsible chat sidebar. Chat state lives in the parent;
 * this component only renders the list and forwards user intents.
 */
interface SidebarProps {
  chats: Chat[];
  currentChatId: string | null;
  onSelectChat: (chatId: string) => void;
  onNewChat: () => void;
  onDeleteChat: (chatId: string) => void;
  isCollapsed: boolean;
  onToggleCollapse: () => void;
}

/**
 * Left sidebar: new-chat button, title search, chat list, and theme toggle.
 * Collapses to an icon rail when `isCollapsed` is set.
 */
export const Sidebar: React.FC<SidebarProps> = ({
  chats,
  currentChatId,
  onSelectChat,
  onNewChat,
  onDeleteChat,
  isCollapsed,
  onToggleCollapse
}) => {
  const { theme, toggleTheme } = useTheme();
  const [searchQuery, setSearchQuery] = useState('');
  const [isSearchOpen, setIsSearchOpen] = useState(false);
  const searchInputRef = React.useRef<HTMLInputElement>(null);

  // FIX: use Turkish locale-aware lowercasing for the search filter.
  // Plain toLowerCase() mishandles dotted/dotless I (e.g. 'İ' → 'i̇',
  // 'I' → 'i' instead of 'ı'), so Turkish titles would fail to match.
  const normalize = (text: string) => text.toLocaleLowerCase('tr');

  // Filter chats by title against the current search term.
  const filteredChats = chats.filter(chat =>
    normalize(chat.title).includes(normalize(searchQuery))
  );

  // Focus the search input as soon as it is revealed.
  React.useEffect(() => {
    if (isSearchOpen && searchInputRef.current) {
      searchInputRef.current.focus();
    }
  }, [isSearchOpen]);

  return (
    <div className={`${isCollapsed ? 'w-14' : 'w-72'} bg-gray-900 dark:bg-gray-100 border-r border-gray-800 dark:border-gray-300 flex flex-col h-screen transition-all duration-300`}>
      {/* Header */}
      <div className="p-2 space-y-1">
        {!isCollapsed ? (
          <>
            <button
              onClick={onToggleCollapse}
              className="w-full flex items-center px-3 py-2 text-gray-400 dark:text-gray-600 hover:text-gray-200 dark:hover:text-gray-900 hover:bg-gray-800 dark:hover:bg-gray-200 rounded-lg transition-all"
              title="Sidebar'ı Daralt"
            >
              <PanelLeftClose size={18} />
            </button>
            <button
              onClick={onNewChat}
              className="w-full flex items-center gap-2 px-3 py-2.5 text-gray-200 dark:text-gray-900 hover:bg-gray-800 dark:hover:bg-gray-200 rounded-lg transition-all font-medium text-sm"
            >
              <MessageSquarePlus size={18} />
              <span>Yeni sohbet</span>
            </button>
            {/* Search Input — a button until opened, then a live filter box. */}
            {!isSearchOpen ? (
              <button
                onClick={() => setIsSearchOpen(true)}
                className="w-full flex items-center gap-2 px-3 py-2 text-gray-400 dark:text-gray-600 hover:text-gray-200 dark:hover:text-gray-900 hover:bg-gray-800 dark:hover:bg-gray-200 rounded-lg transition-all text-sm"
              >
                <Search size={18} />
                <span>Sohbet ara</span>
              </button>
            ) : (
              <div className="relative">
                <Search size={16} className="absolute left-3 top-1/2 transform -translate-y-1/2 text-gray-400 dark:text-gray-600" />
                <input
                  ref={searchInputRef}
                  type="text"
                  placeholder="Sohbet ara..."
                  value={searchQuery}
                  onChange={(e) => setSearchQuery(e.target.value)}
                  onBlur={() => {
                    // Collapse the search box on blur only when empty,
                    // so an active filter stays visible.
                    if (!searchQuery) {
                      setIsSearchOpen(false);
                    }
                  }}
                  className="w-full pl-9 pr-3 py-2 bg-gray-800 dark:bg-gray-200 text-gray-200 dark:text-gray-900 placeholder-gray-500 dark:placeholder-gray-600 rounded-lg text-sm focus:outline-none focus:ring-2 focus:ring-gray-600 dark:focus:ring-gray-400 transition-all"
                />
              </div>
            )}
          </>
        ) : (
          <>
            <button
              onClick={onToggleCollapse}
              className="w-full flex items-center justify-center py-2.5 text-gray-400 dark:text-gray-600 hover:text-gray-200 dark:hover:text-gray-900 hover:bg-gray-800 dark:hover:bg-gray-200 rounded-lg transition-all"
              title="Sidebar'ı Genişlet"
            >
              <PanelLeft size={20} />
            </button>
            <button
              onClick={onNewChat}
              className="w-full flex items-center justify-center py-2.5 text-gray-200 dark:text-gray-900 hover:bg-gray-800 dark:hover:bg-gray-200 rounded-lg transition-all"
              title="Yeni sohbet"
            >
              <MessageSquarePlus size={20} />
            </button>
          </>
        )}
      </div>

      {/* Chat List */}
      <div className="flex-1 overflow-y-auto px-2">
        {chats.length === 0 ? (
          !isCollapsed && (
            <div className="p-6 text-center">
              <MessageSquare size={32} className="mx-auto mb-3 text-gray-600 dark:text-gray-400" />
              <p className="text-sm text-gray-400 dark:text-gray-600">Henüz sohbet yok</p>
            </div>
          )
        ) : filteredChats.length === 0 ? (
          !isCollapsed && (
            <div className="p-6 text-center">
              <Search size={32} className="mx-auto mb-3 text-gray-600 dark:text-gray-400" />
              <p className="text-sm text-gray-400 dark:text-gray-600">Sonuç bulunamadı</p>
            </div>
          )
        ) : (
          <div className="space-y-1">
            {filteredChats.map((chat) => (
              <div
                key={chat.id}
                className={`group relative flex items-center ${isCollapsed ? 'justify-center py-2.5' : 'gap-2 px-3 py-2'} rounded-lg cursor-pointer transition-all ${
                  currentChatId === chat.id
                    ? 'bg-gray-800 dark:bg-gray-200'
                    : 'hover:bg-gray-800 dark:hover:bg-gray-200'
                }`}
                onClick={() => onSelectChat(chat.id)}
                title={isCollapsed ? chat.title : ''}
              >
                <MessageSquare
                  size={isCollapsed ? 20 : 16}
                  className={`flex-shrink-0 ${
                    currentChatId === chat.id
                      ? 'text-gray-200 dark:text-gray-900'
                      : 'text-gray-400 dark:text-gray-600'
                  }`}
                />
                {!isCollapsed && (
                  <>
                    <div className="flex-1 min-w-0">
                      <p className="text-sm truncate text-gray-200 dark:text-gray-900">
                        {chat.title}
                      </p>
                    </div>
                    <button
                      onClick={(e) => {
                        // Don't let the delete click also select the chat.
                        e.stopPropagation();
                        onDeleteChat(chat.id);
                      }}
                      className="opacity-0 group-hover:opacity-100 p-1 hover:bg-gray-700 dark:hover:bg-gray-300 text-gray-400 dark:text-gray-600 rounded transition-all"
                    >
                      <Trash2 size={14} />
                    </button>
                  </>
                )}
              </div>
            ))}
          </div>
        )}
      </div>

      {/* Footer */}
      <div className="p-2 border-t border-gray-800 dark:border-gray-300">
        <button
          onClick={toggleTheme}
          className={`w-full flex items-center ${isCollapsed ? 'justify-center py-2.5' : 'gap-2 px-3 py-2'} text-sm text-gray-400 dark:text-gray-600 hover:text-gray-200 dark:hover:text-gray-900 hover:bg-gray-800 dark:hover:bg-gray-200 rounded-lg transition-colors`}
          title={isCollapsed ? (theme === 'light' ? 'Karanlık tema' : 'Aydınlık tema') : ''}
        >
          {theme === 'light' ? (
            <>
              <Moon size={isCollapsed ? 20 : 16} />
              {!isCollapsed && <span>Karanlık tema</span>}
            </>
          ) : (
            <>
              <Sun size={isCollapsed ? 20 : 16} />
              {!isCollapsed && <span>Aydınlık tema</span>}
            </>
          )}
        </button>
      </div>
    </div>
  );
};
frontend/contexts/ThemeContext.tsx ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import React, { createContext, useContext, useEffect, useState } from 'react';

type Theme = 'light' | 'dark';

interface ThemeContextType {
  theme: Theme;
  toggleTheme: () => void;
}

const ThemeContext = createContext<ThemeContextType | undefined>(undefined);

/**
 * Determine the initial theme.
 *
 * FIX over the original: `localStorage.getItem('theme') as Theme` trusted
 * whatever string was stored — any garbage value (e.g. from another app on
 * the same origin) would be used as the theme. We now validate the value
 * and fall back to the OS preference. Access is also wrapped in try/catch
 * because localStorage can throw (private browsing, blocked storage).
 */
const getInitialTheme = (): Theme => {
  try {
    const saved = localStorage.getItem('theme');
    if (saved === 'light' || saved === 'dark') {
      return saved;
    }
  } catch {
    // Storage unavailable — fall through to the media-query default.
  }
  return window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light';
};

/**
 * Provides light/dark theme state. Persists the choice to localStorage and
 * mirrors it onto <html class="dark"> for Tailwind's class-based dark mode.
 */
export const ThemeProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
  const [theme, setTheme] = useState<Theme>(getInitialTheme);

  useEffect(() => {
    try {
      localStorage.setItem('theme', theme);
    } catch {
      // Persisting is best-effort; the in-memory theme still applies.
    }
    document.documentElement.classList.toggle('dark', theme === 'dark');
  }, [theme]);

  const toggleTheme = () => {
    setTheme(prev => prev === 'light' ? 'dark' : 'light');
  };

  return (
    <ThemeContext.Provider value={{ theme, toggleTheme }}>
      {children}
    </ThemeContext.Provider>
  );
};

/** Hook accessor; throws if used outside a <ThemeProvider>. */
export const useTheme = () => {
  const context = useContext(ThemeContext);
  if (!context) {
    throw new Error('useTheme must be used within ThemeProvider');
  }
  return context;
};
frontend/index.css ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
/* Tailwind layer injection — must come first. */
@tailwind base;
@tailwind components;
@tailwind utilities;

/* Global typography and rendering defaults (Vite starter baseline). */
:root {
  font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif;
  line-height: 1.5;
  font-weight: 400;

  color-scheme: light dark;
  color: rgba(255, 255, 255, 0.87);
  background-color: #242424;

  font-synthesis: none;
  text-rendering: optimizeLegibility;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}

/* Minimal reset. */
* {
  margin: 0;
  padding: 0;
  box-sizing: border-box;
}

body {
  margin: 0;
  display: flex;
  place-items: center;
  min-width: 320px;
  min-height: 100vh;
}

/* React mount point fills the viewport. */
#root {
  width: 100%;
  height: 100vh;
}

/* Scrollbar Styling (WebKit browsers only). */
::-webkit-scrollbar {
  width: 8px;
  height: 8px;
}

::-webkit-scrollbar-track {
  background: transparent;
}

::-webkit-scrollbar-thumb {
  background: #4b5563;
  border-radius: 4px;
}

::-webkit-scrollbar-thumb:hover {
  background: #6b7280;
}

/* Darker thumb when Tailwind's .dark class is active on <html>. */
.dark ::-webkit-scrollbar-thumb {
  background: #374151;
}

.dark ::-webkit-scrollbar-thumb:hover {
  background: #4b5563;
}
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!doctype html>
2
+ <html lang="tr">
3
+ <head>
4
+ <meta charset="UTF-8" />
5
+ <link rel="icon" type="image/svg+xml" href="/vite.svg" />
6
+ <meta name="viewport" content="width=device-width, initial-scale=1.0" />
7
+ <title>Turkish AI Chat</title>
8
+ </head>
9
+ <body>
10
+ <div id="root"></div>
11
+ <script type="module" src="/main.tsx"></script>
12
+ </body>
13
+ </html>
frontend/main.tsx ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import { StrictMode } from 'react';
import { createRoot } from 'react-dom/client';
import './index.css';
import App from './App.tsx';
import { ThemeProvider } from './contexts/ThemeContext';

// Application entry point: mount <App/> under StrictMode with the theme
// provider so every component can read/toggle light-dark state.
const container = document.getElementById('root')!;

createRoot(container).render(
  <StrictMode>
    <ThemeProvider>
      <App />
    </ThemeProvider>
  </StrictMode>,
);
frontend/package-lock.json ADDED
The diff for this file is too large to render. See raw diff
 
frontend/package.json ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "turkish-reasoning-chat",
3
+ "private": true,
4
+ "version": "0.0.0",
5
+ "type": "module",
6
+ "scripts": {
7
+ "dev": "vite",
8
+ "build": "tsc -b && vite build",
9
+ "lint": "eslint .",
10
+ "preview": "vite preview"
11
+ },
12
+ "dependencies": {
13
+ "lucide-react": "^0.454.0",
14
+ "react": "^18.3.1",
15
+ "react-dom": "^18.3.1"
16
+ },
17
+ "devDependencies": {
18
+ "@eslint/js": "^9.9.0",
19
+ "@types/react": "^18.3.3",
20
+ "@types/react-dom": "^18.3.0",
21
+ "@vitejs/plugin-react": "^4.3.1",
22
+ "autoprefixer": "^10.4.20",
23
+ "eslint": "^9.9.0",
24
+ "eslint-plugin-react-hooks": "^5.1.0-rc.0",
25
+ "eslint-plugin-react-refresh": "^0.4.9",
26
+ "globals": "^15.9.0",
27
+ "postcss": "^8.4.47",
28
+ "tailwindcss": "^3.4.13",
29
+ "typescript": "^5.5.3",
30
+ "typescript-eslint": "^8.0.1",
31
+ "vite": "^5.4.1"
32
+ }
33
+ }
frontend/postcss.config.js ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
// PostCSS pipeline: run Tailwind first, then add vendor prefixes.
const config = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
};

export default config;
frontend/tailwind.config.js ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
/** @type {import('tailwindcss').Config} */
const config = {
  // Files scanned for class names; anything not matched here is purged.
  content: [
    "./index.html",
    "./App.tsx",
    "./main.tsx",
    "./components/**/*.{js,ts,jsx,tsx}",
    "./contexts/**/*.{js,ts,jsx,tsx}",
    "./utils/**/*.{js,ts,jsx,tsx}",
  ],
  // Dark mode is driven by a .dark class on <html> (see ThemeContext).
  darkMode: 'class',
  theme: {
    extend: {
      colors: {
        // "primary" palette = Tailwind's sky scale, used app-wide.
        primary: {
          50: '#f0f9ff',
          100: '#e0f2fe',
          200: '#bae6fd',
          300: '#7dd3fc',
          400: '#38bdf8',
          500: '#0ea5e9',
          600: '#0284c7',
          700: '#0369a1',
          800: '#075985',
          900: '#0c4a6e',
        }
      }
    },
  },
  plugins: [],
};

export default config;
frontend/tsconfig.json ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "compilerOptions": {
3
+ "target": "ES2020",
4
+ "useDefineForClassFields": true,
5
+ "lib": [
6
+ "ES2020",
7
+ "DOM",
8
+ "DOM.Iterable"
9
+ ],
10
+ "module": "ESNext",
11
+ "skipLibCheck": true,
12
+ "moduleResolution": "bundler",
13
+ "allowImportingTsExtensions": true,
14
+ "resolveJsonModule": true,
15
+ "isolatedModules": true,
16
+ "noEmit": true,
17
+ "jsx": "react-jsx",
18
+ "strict": true
19
+ },
20
+ "include": [
21
+ "."
22
+ ]
23
+ }
frontend/types.ts ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ export interface Message {
2
+ role: 'system' | 'user' | 'assistant';
3
+ content: string;
4
+ image?: string;
5
+ }
6
+
7
+ export interface Chat {
8
+ id: string;
9
+ title: string;
10
+ messages: Message[];
11
+ createdAt: number;
12
+ updatedAt: number;
13
+ modelName: string;
14
+ }
15
+
16
+ export interface ModelConfig {
17
+ name: string;
18
+ path: string;
19
+ supportsImages: boolean;
20
+ }
21
+
22
+ export interface GenerationParams {
23
+ temperature: number;
24
+ maxTokens: number;
25
+ topP: number;
26
+ topK: number;
27
+ }
28
+
29
+ /**
30
+ * Available models for local inference
31
+ *
32
+ * ⚠️ IMPORTANT: Your Turkish models are PEFT/LoRA adapters, not full models!
33
+ * They currently fallback to BASE MODELS without Turkish fine-tuning.
34
+ *
35
+ * To use your Turkish models:
36
+ * 1. Merge LoRA adapters with base model (Python)
37
+ * 2. Export merged model to ONNX format
38
+ * 3. Upload to HuggingFace or host locally
39
+ * 4. Update model paths here
40
+ *
41
+ * See PEFT_TO_ONNX_GUIDE.md for complete instructions.
42
+ *
43
+ * Current behavior:
44
+ * - Chan-Y/TurkishReasoner-* → Uses base Gemma/Qwen/Llama (NO Turkish training)
45
+ * - Base models will respond but WITHOUT your Turkish fine-tuning
46
+ * - Merge & export ONNX to get Turkish responses
47
+ */
48
+ export const MODELS: ModelConfig[] = [
49
+ {
50
+ name: "Gemma 3 1B Turkish Reasoning",
51
+ path: "Chan-Y/TurkishReasoner-Gemma3-1B",
52
+ supportsImages: false
53
+ },
54
+ {
55
+ name: "Gemma 3 12B Turkish (Supports Images)",
56
+ path: "Chan-Y/TurkishReasoner-Gemma3-12B",
57
+ supportsImages: true
58
+ },
59
+ {
60
+ name: "Qwen 2.5 3B Turkish Reasoning",
61
+ path: "Chan-Y/TurkishReasoner-Qwen2.5-3B",
62
+ supportsImages: false
63
+ },
64
+ {
65
+ name: "Llama 3.1 8B Turkish Reasoning",
66
+ path: "Chan-Y/TurkishReasoner-Llama3.1-8B",
67
+ supportsImages: false
68
+ }
69
+ ];
70
+
71
+ export const DEFAULT_SYSTEM_PROMPT = `Sen kullanıcıların isteklerine Türkçe cevap veren bir asistansın ve sana bir problem verildi.
72
+ Problem hakkında düşün ve çalışmanı göster.
73
+ Çalışmanı <start_working_out> ve <end_working_out> arasına yerleştir.
74
+ Sonra, çözümünü <SOLUTION> ve </SOLUTION> arasına yerleştir.
75
+ Lütfen SADECE Türkçe kullan.`;
frontend/utils/api.ts ADDED
@@ -0,0 +1,223 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // API Configuration
2
+ const API_BASE_URL = import.meta.env.PROD ? '' : 'http://localhost:8000';
3
+
4
+ // 🎭 DUMMY MODE - Backend olmadan test için
5
+ const DUMMY_MODE = false; // Backend varsa false yapın
6
+
7
+ interface GenerateRequest {
8
+ model_name: string;
9
+ prompt: string;
10
+ system_prompt: string;
11
+ max_tokens: number;
12
+ temperature: number;
13
+ top_p: number;
14
+ top_k: number;
15
+ image?: string;
16
+ }
17
+
18
+ interface GenerateResponse {
19
+ generated_text: string;
20
+ model_used: string;
21
+ }
22
+
23
+ interface ProgressCallback {
24
+ status: 'loading' | 'generating' | 'ready' | 'error';
25
+ progress?: number;
26
+ message?: string;
27
+ }
28
+
29
+ interface GenerateOptions extends GenerateRequest {
30
+ onToken?: (token: string) => void;
31
+ onProgress?: (progress: ProgressCallback) => void;
32
+ }
33
+
34
+ // Dummy yanıtlar - farklı sorular için farklı cevaplar
35
+
36
+ const DUMMY_RESPONSES = [
37
+ `<start_working_out>
38
+ Kullanıcı 121 sayısının karekökünü soruyor.
39
+
40
+ Adım 1: Problemi Analiz Et
41
+ - Hedef: $\\sqrt{121}$ değerini bulmak.
42
+ - Hangi sayıyı kendisiyle çarparsam 121 eder?
43
+
44
+ Adım 2: Tahmin Yürütme
45
+ - 10 x 10 = 100 (Çok küçük)
46
+ - 12 x 12 = 144 (Çok büyük)
47
+ - Cevap 10 ile 12 arasında olmalı.
48
+
49
+ Adım 3: Hesaplama
50
+ - Sonu 1 ile biten bir sayı arıyorum (121).
51
+ - 11 sayısını deneyelim:
52
+ 11 x 10 = 110
53
+ 110 + 11 = 121
54
+
55
+ Sonuç doğrulandı.
56
+ <end_working_out>
57
+
58
+ <SOLUTION>
59
+ 121 sayısının karekökü **11**'dir.
60
+
61
+ Matematiksel işlemi şöyledir:
62
+ $$ \\sqrt{121} = 11 $$
63
+
64
+ Sağlaması:
65
+ 11 x 11 = 121
66
+ </SOLUTION>`
67
+ ];
68
+
69
+ /**
70
+ * Generate response with dummy data or from backend
71
+ */
72
+ export async function generateResponse(options: GenerateOptions): Promise<void> {
73
+ const {
74
+ model_name,
75
+ prompt,
76
+ system_prompt,
77
+ max_tokens,
78
+ temperature,
79
+ top_p,
80
+ top_k,
81
+ image,
82
+ onToken,
83
+ onProgress
84
+ } = options;
85
+
86
+ // 🎭 DUMMY MODE - Backend olmadan test
87
+ if (DUMMY_MODE) {
88
+ try {
89
+ onProgress?.({ status: 'loading', message: 'Model yükleniyor...' });
90
+
91
+ // Simüle edilmiş yükleme gecikmesi
92
+ await new Promise(resolve => setTimeout(resolve, 800));
93
+
94
+ onProgress?.({ status: 'generating', message: 'Yanıt oluşturuluyor...' });
95
+
96
+ // Rastgele bir dummy yanıt seç
97
+ const dummyText = DUMMY_RESPONSES[Math.floor(Math.random() * DUMMY_RESPONSES.length)];
98
+
99
+ // Karakterleri tek tek göster (typing effect)
100
+ let currentText = '';
101
+ const words = dummyText.split(' ');
102
+
103
+ for (let i = 0; i < words.length; i++) {
104
+ currentText += (i > 0 ? ' ' : '') + words[i];
105
+ onToken?.(currentText);
106
+
107
+ // Typing hızı - kelime başına 50-150ms
108
+ const delay = 50 + Math.random() * 100;
109
+ await new Promise(resolve => setTimeout(resolve, delay));
110
+ }
111
+
112
+ onProgress?.({ status: 'ready', message: 'Tamamlandı' });
113
+
114
+ } catch (error) {
115
+ console.error('Dummy response error:', error);
116
+ onProgress?.({
117
+ status: 'error',
118
+ message: 'Dummy yanıt oluşturulurken hata oluştu'
119
+ });
120
+ throw error;
121
+ }
122
+ return;
123
+ }
124
+
125
+ // 🚀 GERÇEK BACKEND MOD
126
+ try {
127
+ onProgress?.({ status: 'loading', message: 'Model yükleniyor...' });
128
+
129
+ const requestBody: GenerateRequest = {
130
+ model_name,
131
+ prompt,
132
+ system_prompt,
133
+ max_tokens,
134
+ temperature,
135
+ top_p,
136
+ top_k,
137
+ image
138
+ };
139
+
140
+ const response = await fetch(`${API_BASE_URL}/generate`, {
141
+ method: 'POST',
142
+ headers: {
143
+ 'Content-Type': 'application/json',
144
+ },
145
+ body: JSON.stringify(requestBody),
146
+ });
147
+
148
+ if (!response.ok) {
149
+ const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
150
+ throw new Error(error.detail || `HTTP error! status: ${response.status}`);
151
+ }
152
+
153
+ onProgress?.({ status: 'generating', message: 'Yanıt oluşturuluyor...' });
154
+
155
+ const data: GenerateResponse = await response.json();
156
+
157
+ if (onToken) {
158
+ const text = data.generated_text;
159
+ let currentText = '';
160
+
161
+ for (let i = 0; i < text.length; i++) {
162
+ currentText += text[i];
163
+ onToken(currentText);
164
+
165
+ if (i % 5 === 0) {
166
+ await new Promise(resolve => setTimeout(resolve, 10));
167
+ }
168
+ }
169
+ }
170
+
171
+ onProgress?.({ status: 'ready', message: 'Tamamlandı' });
172
+
173
+ } catch (error) {
174
+ console.error('Error generating response:', error);
175
+ onProgress?.({
176
+ status: 'error',
177
+ message: error instanceof Error ? error.message : 'Bir hata oluştu'
178
+ });
179
+ throw error;
180
+ }
181
+ }
182
+
183
+ /**
184
+ * Get available models from the backend
185
+ */
186
+ export async function getAvailableModels() {
187
+ if (DUMMY_MODE) {
188
+ // Dummy modeller
189
+ return [
190
+ { name: "Gemma 3 1B Turkish", path: "dummy/gemma-1b" },
191
+ { name: "Gemma 3 12B Turkish", path: "dummy/gemma-12b" },
192
+ ];
193
+ }
194
+
195
+ try {
196
+ const response = await fetch(`${API_BASE_URL}/models`);
197
+ if (!response.ok) {
198
+ throw new Error(`HTTP error! status: ${response.status}`);
199
+ }
200
+ const data = await response.json();
201
+ return data.models;
202
+ } catch (error) {
203
+ console.error('Error fetching models:', error);
204
+ throw error;
205
+ }
206
+ }
207
+
208
+ /**
209
+ * Health check for the backend API
210
+ */
211
+ export async function checkApiHealth(): Promise<boolean> {
212
+ if (DUMMY_MODE) {
213
+ return true; // Dummy modda her zaman sağlıklı
214
+ }
215
+
216
+ try {
217
+ const response = await fetch(`${API_BASE_URL}/`);
218
+ return response.ok;
219
+ } catch (error) {
220
+ console.error('API health check failed:', error);
221
+ return false;
222
+ }
223
+ }
frontend/utils/chatStorage.ts ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { Chat } from '../types';
2
+
3
+ const STORAGE_KEY = 'turkish-ai-chats';
4
+
5
+ export const saveChats = (chats: Chat[]): void => {
6
+ try {
7
+ localStorage.setItem(STORAGE_KEY, JSON.stringify(chats));
8
+ } catch (error) {
9
+ console.error('Error saving chats:', error);
10
+ }
11
+ };
12
+
13
+ export const loadChats = (): Chat[] => {
14
+ try {
15
+ const stored = localStorage.getItem(STORAGE_KEY);
16
+ return stored ? JSON.parse(stored) : [];
17
+ } catch (error) {
18
+ console.error('Error loading chats:', error);
19
+ return [];
20
+ }
21
+ };
22
+
23
+ export const createNewChat = (modelName: string): Chat => {
24
+ return {
25
+ id: Date.now().toString(),
26
+ title: 'Yeni Sohbet',
27
+ messages: [],
28
+ createdAt: Date.now(),
29
+ updatedAt: Date.now(),
30
+ modelName
31
+ };
32
+ };
33
+
34
+ export const updateChatTitle = (chat: Chat): string => {
35
+ const firstUserMessage = chat.messages.find(m => m.role === 'user');
36
+ if (firstUserMessage) {
37
+ const title = firstUserMessage.content.slice(0, 50);
38
+ return title.length < firstUserMessage.content.length ? `${title}...` : title;
39
+ }
40
+ return 'Yeni Sohbet';
41
+ };
42
+
frontend/utils/huggingface.ts ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /**
2
+ * Backend-based inference using FastAPI (Python) for local GPU inference
3
+ * This allows using the full Turkish fine-tuned models with PEFT/LoRA adapters
4
+ *
5
+ * Backend: backend/python_backend/main.py
6
+ * - FastAPI server
7
+ * - PyTorch + Transformers for model loading
8
+ * - PEFT for LoRA adapters (Gemma models)
9
+ * - Direct loading for Qwen/Llama merged models
10
+ */
11
+
12
+ // Backend API configuration
13
+ const BACKEND_URL = import.meta.env.VITE_BACKEND_URL || 'http://localhost:3000';
14
+
15
+ interface GenerateResponseParams {
16
+ modelPath: string;
17
+ systemPrompt: string;
18
+ userInput: string;
19
+ image?: string;
20
+ temperature?: number;
21
+ maxTokens?: number;
22
+ topP?: number;
23
+ topK?: number;
24
+ onToken?: (content: string) => void;
25
+ onProgress?: (progress: any) => void;
26
+ }
27
+
28
+ export async function generateResponse(params: GenerateResponseParams): Promise<void> {
29
+ const {
30
+ modelPath,
31
+ systemPrompt,
32
+ userInput,
33
+ // image, // TODO: Implement image support for multimodal models
34
+ temperature = 0.7,
35
+ maxTokens = 512,
36
+ topP = 0.95,
37
+ topK = 50,
38
+ onToken,
39
+ onProgress
40
+ } = params;
41
+
42
+ try {
43
+ // Notify progress: loading model
44
+ onProgress?.({ status: 'loading', progress: 0 });
45
+ console.log(`Starting inference with model: ${modelPath}`);
46
+
47
+ // Call backend API
48
+ const response = await fetch(`${BACKEND_URL}/api/inference/generate`, {
49
+ method: 'POST',
50
+ headers: {
51
+ 'Content-Type': 'application/json',
52
+ },
53
+ body: JSON.stringify({
54
+ modelPath,
55
+ systemPrompt,
56
+ userInput,
57
+ temperature,
58
+ maxTokens,
59
+ topP,
60
+ topK,
61
+ }),
62
+ });
63
+
64
+ if (!response.ok) {
65
+ throw new Error(`Backend error: ${response.statusText}`);
66
+ }
67
+
68
+ const result = await response.json();
69
+
70
+ if (!result.success) {
71
+ throw new Error(result.error || 'Inference failed');
72
+ }
73
+
74
+ // Update progress: generating
75
+ onProgress?.({ status: 'generating' });
76
+
77
+ // Send the response via onToken callback
78
+ onToken?.(result.response);
79
+
80
+ onProgress?.({ status: 'done' });
81
+ } catch (error) {
82
+ console.error('Error in generateResponse:', error);
83
+ onProgress?.({ status: 'error', error });
84
+ throw error;
85
+ }
86
+ }
87
+
88
+ /**
89
+ * Check backend health and Python environment
90
+ */
91
+ export async function checkBackendHealth(): Promise<{ healthy: boolean; message: string }> {
92
+ try {
93
+ const response = await fetch(`${BACKEND_URL}/api/inference/health`);
94
+ const result = await response.json();
95
+ return result;
96
+ } catch (error) {
97
+ return {
98
+ healthy: false,
99
+ message: `Cannot connect to backend at ${BACKEND_URL}. Make sure the backend is running.`,
100
+ };
101
+ }
102
+ }
frontend/vite-env.d.ts ADDED
@@ -0,0 +1 @@
 
 
1
+ /// <reference types="vite/client" />
frontend/vite.config.ts ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'

// Vite build configuration: only the React plugin is enabled.
// Reference: https://vitejs.dev/config/
export default defineConfig({
  plugins: [react()],
})
requirements.txt ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Backend runtime dependencies.
# NOTE: versions are unpinned — consider pinning for reproducible builds.
fastapi
uvicorn
pydantic
transformers
peft
torch
python-dotenv
accelerate
bitsandbytes
protobuf
sentencepiece
huggingface_hub