Update app.py
Browse files
app.py
CHANGED
|
@@ -6,88 +6,80 @@ import xml.etree.ElementTree as ET
|
|
| 6 |
import schedule
|
| 7 |
import time
|
| 8 |
import threading
|
| 9 |
-
from huggingface_hub import HfApi, create_repo, hf_hub_download
|
| 10 |
import warnings
|
| 11 |
-
import pandas as pd
|
| 12 |
from docx import Document
|
| 13 |
-
import spaces
|
| 14 |
from google.oauth2.service_account import Credentials
|
| 15 |
from googleapiclient.discovery import build
|
| 16 |
from googleapiclient.http import MediaIoBaseDownload
|
| 17 |
import io
|
| 18 |
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
| 19 |
-
warnings.simplefilter('ignore', InsecureRequestWarning)
|
| 20 |
from fastapi import FastAPI, Response
|
| 21 |
from fastapi.middleware.cors import CORSMiddleware
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
# Prompt dosyasını import et
|
| 25 |
from prompts import get_prompt_content_only
|
| 26 |
-
|
| 27 |
-
# Enhanced features import et (sadece temel özellikler)
|
| 28 |
-
from enhanced_features import (
|
| 29 |
-
initialize_enhanced_features, process_image_message,
|
| 30 |
-
handle_comparison_request
|
| 31 |
-
)
|
| 32 |
from image_renderer import extract_product_info_for_gallery, format_message_with_images
|
| 33 |
-
|
| 34 |
-
# Import conversation tracker
|
| 35 |
from conversation_tracker import add_conversation
|
| 36 |
|
| 37 |
-
# === JSON dashboard için yardımcılar ===
|
| 38 |
def save_conversations_json():
|
| 39 |
-
"""
|
| 40 |
-
conversation_tracker.load_conversations() çıktısını
|
| 41 |
-
hem köke hem public/ altına yazar. Dashboard bunları aynı origin'den çeker
|
| 42 |
-
ya da standalone HTML CORS açık /api uçlarından alır.
|
| 43 |
-
"""
|
| 44 |
try:
|
| 45 |
from conversation_tracker import load_conversations
|
| 46 |
convs = load_conversations()
|
| 47 |
-
# kök
|
| 48 |
with open("conversations.json", "w", encoding="utf-8") as f:
|
| 49 |
json.dump(convs, f, ensure_ascii=False, indent=2)
|
| 50 |
-
# public/
|
| 51 |
os.makedirs("public", exist_ok=True)
|
| 52 |
with open("public/conversations.json", "w", encoding="utf-8") as f:
|
| 53 |
json.dump(convs, f, ensure_ascii=False, indent=2)
|
| 54 |
-
print("conversations.json ve public/conversations.json güncellendi.")
|
| 55 |
except Exception as e:
|
| 56 |
print(f"conversations.json yazma hatası: {e}")
|
| 57 |
|
| 58 |
-
# Import smart warehouse with GPT intelligence and price
|
| 59 |
try:
|
| 60 |
-
from
|
| 61 |
-
|
| 62 |
-
except ImportError:
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 67 |
|
| 68 |
def get_warehouse_stock(product_name):
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
|
| 75 |
-
|
| 76 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 77 |
return get_warehouse_stock_old(product_name)
|
| 78 |
|
| 79 |
-
# OLD warehouse stock finder - general algorithm
|
| 80 |
def get_warehouse_stock_old(product_name):
|
| 81 |
-
"""Smart warehouse stock finder with general algorithm"""
|
| 82 |
try:
|
| 83 |
import re
|
| 84 |
-
|
| 85 |
-
# Get XML with retry
|
| 86 |
xml_text = None
|
| 87 |
for attempt in range(3):
|
| 88 |
try:
|
| 89 |
url = 'https://video.trek-turkey.com/bizimhesap-warehouse-xml-b2b-api-v2.php'
|
| 90 |
-
timeout_val = 10 + (attempt * 5)
|
| 91 |
response = requests.get(url, verify=False, timeout=timeout_val)
|
| 92 |
xml_text = response.text
|
| 93 |
break
|
|
@@ -96,30 +88,18 @@ def get_warehouse_stock_old(product_name):
|
|
| 96 |
return None
|
| 97 |
except Exception:
|
| 98 |
return None
|
| 99 |
-
|
| 100 |
-
# Turkish normalize
|
| 101 |
def normalize(text):
|
| 102 |
tr_map = {'ı': 'i', 'ğ': 'g', 'ü': 'u', 'ş': 's', 'ö': 'o', 'ç': 'c', 'İ': 'i', 'I': 'i'}
|
| 103 |
text = text.lower()
|
| 104 |
for tr, en in tr_map.items():
|
| 105 |
text = text.replace(tr, en)
|
| 106 |
return text
|
| 107 |
-
|
| 108 |
-
# Parse query
|
| 109 |
query = normalize(product_name.strip()).replace('(2026)', '').replace('(2025)', '').strip()
|
| 110 |
words = query.split()
|
| 111 |
-
|
| 112 |
-
# Find size markers (S, M, L, etc.)
|
| 113 |
sizes = ['s', 'm', 'l', 'xl', 'xs', 'xxl', 'ml']
|
| 114 |
size = next((w for w in words if w in sizes), None)
|
| 115 |
-
|
| 116 |
-
# Smart filtering: Keep only meaningful product identifiers
|
| 117 |
product_words = []
|
| 118 |
-
|
| 119 |
-
# If query is very short (like "hangi boyu"), skip it
|
| 120 |
-
if len(words) <= 2 and not any(w.isdigit() for w in words):
|
| 121 |
-
pass
|
| 122 |
-
else:
|
| 123 |
for word in words:
|
| 124 |
if word in sizes:
|
| 125 |
continue
|
|
@@ -139,49 +119,35 @@ def get_warehouse_stock_old(product_name):
|
|
| 139 |
if consonants <= 2:
|
| 140 |
continue
|
| 141 |
product_words.append(word)
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
|
| 145 |
-
# Find all Product blocks in XML
|
| 146 |
-
product_pattern = r'<Product>(.*?)</Product>'
|
| 147 |
-
all_products = re.findall(product_pattern, xml_text, re.DOTALL)
|
| 148 |
-
|
| 149 |
-
print(f"DEBUG - Total products in XML: {len(all_products)}")
|
| 150 |
-
|
| 151 |
best_match = None
|
| 152 |
for product_block in all_products:
|
| 153 |
-
name_match =
|
| 154 |
if not name_match:
|
| 155 |
continue
|
| 156 |
-
|
| 157 |
product_name_in_xml = name_match.group(1)
|
| 158 |
normalized_xml_name = normalize(product_name_in_xml)
|
| 159 |
-
|
| 160 |
match = True
|
| 161 |
for word in product_words:
|
| 162 |
if word not in normalized_xml_name:
|
| 163 |
if not (word.isdigit() and any(f"{prev}{word}" in normalized_xml_name or f"{prev} {word}" in normalized_xml_name for prev in product_words if not prev.isdigit())):
|
| 164 |
match = False
|
| 165 |
break
|
| 166 |
-
|
| 167 |
if match:
|
| 168 |
if size:
|
| 169 |
-
variant_match =
|
| 170 |
if variant_match:
|
| 171 |
variant = variant_match.group(1)
|
| 172 |
if variant.upper().startswith(f'{size.upper()}-'):
|
| 173 |
-
print(f"DEBUG - Found match: {product_name_in_xml} - {variant}")
|
| 174 |
best_match = product_block
|
| 175 |
break
|
| 176 |
else:
|
| 177 |
best_match = product_block
|
| 178 |
break
|
| 179 |
-
|
| 180 |
if best_match:
|
| 181 |
warehouse_info = []
|
| 182 |
-
|
| 183 |
-
warehouses = re.findall(warehouse_regex, best_match, re.DOTALL)
|
| 184 |
-
|
| 185 |
for wh_name, wh_stock in warehouses:
|
| 186 |
try:
|
| 187 |
stock = int(wh_stock.strip())
|
|
@@ -197,284 +163,186 @@ def get_warehouse_stock_old(product_name):
|
|
| 197 |
else:
|
| 198 |
display = wh_name
|
| 199 |
warehouse_info.append(f"{display}: Mevcut")
|
| 200 |
-
except:
|
| 201 |
pass
|
| 202 |
-
|
| 203 |
return warehouse_info if warehouse_info else ["Hiçbir mağazada mevcut değil"]
|
| 204 |
else:
|
| 205 |
-
print(f"DEBUG - No match found for {' '.join(product_words)}")
|
| 206 |
return ["Hiçbir mağazada mevcut değil"]
|
| 207 |
-
|
| 208 |
except Exception as e:
|
| 209 |
print(f"Warehouse error: {e}")
|
| 210 |
return None
|
| 211 |
|
| 212 |
-
|
| 213 |
-
|
| 214 |
-
|
| 215 |
-
|
| 216 |
-
|
| 217 |
-
|
| 218 |
-
|
| 219 |
-
|
| 220 |
-
|
| 221 |
-
|
| 222 |
-
|
| 223 |
-
|
| 224 |
-
|
| 225 |
-
|
| 226 |
-
|
| 227 |
-
|
| 228 |
-
|
| 229 |
-
|
| 230 |
-
|
| 231 |
-
if
|
| 232 |
-
|
| 233 |
-
|
| 234 |
-
|
| 235 |
-
|
| 236 |
-
|
| 237 |
-
|
| 238 |
-
|
| 239 |
-
else
|
| 240 |
-
print(f"HTTP hatası: {response.status_code}")
|
| 241 |
-
root = None
|
| 242 |
-
|
| 243 |
-
products = []
|
| 244 |
-
if root is not None:
|
| 245 |
-
for item in root.findall('item'):
|
| 246 |
-
rootlabel_elem = item.find('rootlabel')
|
| 247 |
-
stock_elem = item.find('stockAmount')
|
| 248 |
-
if rootlabel_elem is None or stock_elem is None:
|
| 249 |
-
continue
|
| 250 |
-
|
| 251 |
-
name_words = rootlabel_elem.text.lower().split()
|
| 252 |
-
name = name_words[0]
|
| 253 |
-
full_name = ' '.join(name_words)
|
| 254 |
-
|
| 255 |
-
stock_amount = "stokta" if stock_elem.text and stock_elem.text > '0' else "stokta değil"
|
| 256 |
-
|
| 257 |
-
if stock_amount == "stokta":
|
| 258 |
-
price_elem = item.find('priceTaxWithCur')
|
| 259 |
-
price_str = price_elem.text if price_elem is not None and price_elem.text else "Fiyat bilgisi yok"
|
| 260 |
-
price_eft_elem = item.find('priceEft')
|
| 261 |
-
price_eft_str = price_eft_elem.text if price_eft_elem is not None and price_eft_elem.text else ""
|
| 262 |
-
price_rebate_elem = item.find('priceRebateWithTax')
|
| 263 |
-
price_rebate_str = price_rebate_elem.text if price_rebate_elem is not None and price_rebate_elem.text else ""
|
| 264 |
-
price_rebate_money_order_elem = item.find('priceRebateWithMoneyOrderWithTax')
|
| 265 |
-
price_rebate_money_order_str = price_rebate_money_order_elem.text if price_rebate_money_order_elem is not None and price_rebate_money_order_elem.text else ""
|
| 266 |
-
|
| 267 |
-
try:
|
| 268 |
-
price_float = float(price_str)
|
| 269 |
-
if price_float > 200000:
|
| 270 |
-
price = str(round(price_float / 5000) * 5000)
|
| 271 |
-
elif price_float > 30000:
|
| 272 |
-
price = str(round(price_float / 1000) * 1000)
|
| 273 |
-
elif price_float > 10000:
|
| 274 |
-
price = str(round(price_float / 100) * 100)
|
| 275 |
-
else:
|
| 276 |
-
price = str(round(price_float / 10) * 10)
|
| 277 |
-
except (ValueError, TypeError):
|
| 278 |
-
price = price_str
|
| 279 |
-
|
| 280 |
-
if price_eft_str:
|
| 281 |
-
try:
|
| 282 |
-
price_eft_float = float(price_eft_str)
|
| 283 |
-
if price_eft_float > 200000:
|
| 284 |
-
price_eft = str(round(price_eft_float / 5000) * 5000)
|
| 285 |
-
elif price_eft_float > 30000:
|
| 286 |
-
price_eft = str(round(price_eft_float / 1000) * 1000)
|
| 287 |
-
elif price_eft_float > 10000:
|
| 288 |
-
price_eft = str(round(price_eft_float / 100) * 100)
|
| 289 |
-
else:
|
| 290 |
-
price_eft = str(round(price_eft_float / 10) * 10)
|
| 291 |
-
except (ValueError, TypeError):
|
| 292 |
-
price_eft = price_eft_str
|
| 293 |
-
else:
|
| 294 |
-
price_eft = ""
|
| 295 |
-
|
| 296 |
-
if price_rebate_str:
|
| 297 |
-
try:
|
| 298 |
-
price_rebate_float = float(price_rebate_str)
|
| 299 |
-
if price_rebate_float > 200000:
|
| 300 |
-
price_rebate = str(round(price_rebate_float / 5000) * 5000)
|
| 301 |
-
elif price_rebate_float > 30000:
|
| 302 |
-
price_rebate = str(round(price_rebate_float / 1000) * 1000)
|
| 303 |
-
elif price_rebate_float > 10000:
|
| 304 |
-
price_rebate = str(round(price_rebate_float / 100) * 100)
|
| 305 |
-
else:
|
| 306 |
-
price_rebate = str(round(price_rebate_float / 10) * 10)
|
| 307 |
-
except (ValueError, TypeError):
|
| 308 |
-
price_rebate = price_rebate_str
|
| 309 |
-
else:
|
| 310 |
-
price_rebate = ""
|
| 311 |
-
|
| 312 |
-
if price_rebate_money_order_str:
|
| 313 |
try:
|
| 314 |
-
|
| 315 |
-
if
|
| 316 |
-
|
| 317 |
-
elif
|
| 318 |
-
|
| 319 |
-
elif
|
| 320 |
-
|
| 321 |
else:
|
| 322 |
-
|
| 323 |
-
except
|
| 324 |
-
|
| 325 |
-
|
| 326 |
-
|
| 327 |
-
|
| 328 |
-
|
| 329 |
-
|
| 330 |
-
|
| 331 |
-
|
| 332 |
-
|
| 333 |
-
|
| 334 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 335 |
else:
|
| 336 |
-
product_info
|
| 337 |
-
|
| 338 |
-
product_info = [stock_amount]
|
| 339 |
-
|
| 340 |
-
products.append((name, product_info, full_name))
|
| 341 |
-
|
| 342 |
-
print(f"Toplam {len(products)} ürün yüklendi.")
|
| 343 |
-
|
| 344 |
-
# Initialize enhanced features
|
| 345 |
-
initialize_enhanced_features(OPENAI_API_KEY, products)
|
| 346 |
-
|
| 347 |
-
# İlk başta dosyaları oluştur (boşsa)
|
| 348 |
-
save_conversations_json()
|
| 349 |
-
|
| 350 |
-
# Initialize improved chatbot if available
|
| 351 |
-
improved_bot = None
|
| 352 |
-
if USE_IMPROVED_SEARCH:
|
| 353 |
-
try:
|
| 354 |
-
improved_bot = ImprovedChatbot(products)
|
| 355 |
-
print("Improved product search initialized successfully")
|
| 356 |
except Exception as e:
|
| 357 |
-
print(f"
|
| 358 |
-
|
| 359 |
|
| 360 |
-
# Google Drive API ayarları
|
| 361 |
-
GOOGLE_CREDENTIALS_PATH = os.getenv("GOOGLE_CREDENTIALS_PATH")
|
| 362 |
-
GOOGLE_FOLDER_ID = "1bE8aMj8-eFGftjMPOF8bKQJAhfHa0BN8"
|
| 363 |
-
|
| 364 |
-
# Global değişkenler
|
| 365 |
-
file_lock = threading.Lock()
|
| 366 |
-
history_lock = threading.Lock()
|
| 367 |
-
global_chat_history = []
|
| 368 |
-
document_content = ""
|
| 369 |
-
|
| 370 |
-
# Google Drive'dan döküman indirme fonksiyonu
|
| 371 |
def download_documents_from_drive():
|
| 372 |
global document_content
|
| 373 |
-
|
| 374 |
if not GOOGLE_CREDENTIALS_PATH:
|
| 375 |
print("Google credentials dosyası bulunamadı.")
|
| 376 |
return
|
| 377 |
-
|
| 378 |
try:
|
| 379 |
credentials = Credentials.from_service_account_file(GOOGLE_CREDENTIALS_PATH)
|
| 380 |
service = build('drive', 'v3', credentials=credentials)
|
| 381 |
-
|
| 382 |
-
results = service.files().list(
|
| 383 |
-
q=f"'{GOOGLE_FOLDER_ID}' in parents",
|
| 384 |
-
fields="files(id, name, mimeType)"
|
| 385 |
-
).execute()
|
| 386 |
-
|
| 387 |
files = results.get('files', [])
|
| 388 |
all_content = []
|
| 389 |
-
|
| 390 |
for file in files:
|
| 391 |
-
|
| 392 |
-
if file['mimeType'] == 'application/vnd.openxmlformats-officedocument.wordprocessingml.document':
|
| 393 |
request = service.files().get_media(fileId=file['id'])
|
| 394 |
file_io = io.BytesIO()
|
| 395 |
downloader = MediaIoBaseDownload(file_io, request)
|
| 396 |
-
|
| 397 |
done = False
|
| 398 |
while done is False:
|
| 399 |
status, done = downloader.next_chunk()
|
| 400 |
-
|
| 401 |
file_io.seek(0)
|
| 402 |
doc = Document(file_io)
|
| 403 |
-
|
| 404 |
content = f"\n=== {file['name']} ===\n"
|
| 405 |
for paragraph in doc.paragraphs:
|
| 406 |
if paragraph.text.strip():
|
| 407 |
content += paragraph.text + "\n"
|
| 408 |
-
|
| 409 |
all_content.append(content)
|
| 410 |
-
|
| 411 |
document_content = "\n".join(all_content)
|
| 412 |
-
print(f"Toplam {len(files)} döküman yüklendi.")
|
| 413 |
-
|
| 414 |
except Exception as e:
|
| 415 |
print(f"Google Drive'dan döküman indirme hatası: {e}")
|
| 416 |
|
| 417 |
-
# Döküman indirme işlemini arka planda çalıştır
|
| 418 |
-
document_thread = threading.Thread(target=download_documents_from_drive, daemon=True)
|
| 419 |
-
document_thread.start()
|
| 420 |
-
|
| 421 |
-
# Log dosyasını zamanla temizleme fonksiyonu
|
| 422 |
def clear_log_file():
|
| 423 |
try:
|
| 424 |
if os.path.exists(LOG_FILE):
|
| 425 |
with file_lock:
|
| 426 |
with open(LOG_FILE, 'w', encoding='utf-8') as f:
|
| 427 |
f.write("Log dosyası temizlendi.\n")
|
| 428 |
-
print("Log dosyası temizlendi.")
|
| 429 |
except Exception as e:
|
| 430 |
print(f"Log dosyası temizleme hatası: {e}")
|
| 431 |
|
| 432 |
-
# Zamanlanmış görevleri çalıştırma fonksiyonu
|
| 433 |
def run_scheduler(chat_history):
|
| 434 |
schedule.every().day.at("03:00").do(clear_log_file)
|
| 435 |
while True:
|
| 436 |
schedule.run_pending()
|
| 437 |
time.sleep(60)
|
| 438 |
|
| 439 |
-
# Chatbot fonksiyonu
|
| 440 |
def chatbot_fn(user_message, history, image=None):
|
| 441 |
if history is None:
|
| 442 |
history = []
|
| 443 |
-
|
| 444 |
warehouse_stock_data = None
|
| 445 |
-
print(f"DEBUG - Getting warehouse stock FIRST for: {user_message}")
|
| 446 |
try:
|
| 447 |
warehouse_stock_data = get_warehouse_stock(user_message)
|
| 448 |
-
if warehouse_stock_data:
|
| 449 |
-
print(f"DEBUG - Warehouse stock found: {warehouse_stock_data[:2]}...")
|
| 450 |
-
else:
|
| 451 |
-
print(f"DEBUG - No warehouse stock data returned")
|
| 452 |
except Exception as e:
|
| 453 |
print(f"DEBUG - Warehouse stock error at start: {e}")
|
| 454 |
-
|
| 455 |
try:
|
| 456 |
if image is not None:
|
| 457 |
user_message = process_image_message(image, user_message)
|
| 458 |
-
|
| 459 |
comparison_result = handle_comparison_request(user_message)
|
| 460 |
if comparison_result:
|
| 461 |
yield comparison_result
|
| 462 |
return
|
| 463 |
except Exception as e:
|
| 464 |
print(f"Enhanced features error: {e}")
|
| 465 |
-
|
| 466 |
try:
|
| 467 |
with file_lock:
|
| 468 |
with open(LOG_FILE, 'a', encoding='utf-8') as f:
|
| 469 |
f.write(f"User: {user_message}\n")
|
| 470 |
except Exception as e:
|
| 471 |
print(f"Dosya yazma hatası (Kullanıcı): {e}")
|
| 472 |
-
|
| 473 |
system_messages = get_prompt_content_only()
|
| 474 |
-
|
| 475 |
if document_content:
|
| 476 |
system_messages.append({"role": "system", "content": f"Dökümanlardan gelen bilgiler: {document_content}"})
|
| 477 |
-
|
| 478 |
product_found_improved = False
|
| 479 |
if USE_IMPROVED_SEARCH and improved_bot:
|
| 480 |
try:
|
|
@@ -486,41 +354,24 @@ def chatbot_fn(user_message, history, image=None):
|
|
| 486 |
for store_info in warehouse_stock_data:
|
| 487 |
warehouse_info += f"• {store_info}\n"
|
| 488 |
enhanced_response += warehouse_info
|
| 489 |
-
print(f"DEBUG - Added warehouse stock to improved search response")
|
| 490 |
elif warehouse_stock_data == ["Hiçbir mağazada mevcut değil"]:
|
| 491 |
enhanced_response += f"\n\n🏪 MAĞAZA STOK BİLGİLERİ: Hiçbir mağazada mevcut değil"
|
| 492 |
-
|
| 493 |
-
|
| 494 |
-
system_messages.append({
|
| 495 |
-
"role": "system",
|
| 496 |
-
"content": f"ÜRÜN BİLGİSİ:\n{enhanced_response}\n\nBu bilgileri kullanarak kullanıcıya yardımcı ol."
|
| 497 |
-
})
|
| 498 |
product_found_improved = True
|
| 499 |
except Exception as e:
|
| 500 |
print(f"Improved search error: {e}")
|
| 501 |
-
|
| 502 |
if not product_found_improved:
|
| 503 |
-
print(f"DEBUG chatbot_fn - Using warehouse stock data for: {user_message}")
|
| 504 |
if warehouse_stock_data and warehouse_stock_data != ["Hiçbir mağazada mevcut değil"]:
|
| 505 |
warehouse_info = f"🏪 MAĞAZA STOK BİLGİLERİ:\n"
|
| 506 |
for store_info in warehouse_stock_data:
|
| 507 |
warehouse_info += f"• {store_info}\n"
|
| 508 |
-
system_messages.append({
|
| 509 |
-
"role": "system",
|
| 510 |
-
"content": f"GÜNCEL STOK DURUMU:\n{warehouse_info}\n\nBu bilgileri kullanarak kullanıcıya hangi mağazada stok olduğunu söyle."
|
| 511 |
-
})
|
| 512 |
-
print(f"DEBUG - Using warehouse stock data")
|
| 513 |
elif warehouse_stock_data == ["Hiçbir mağazada mevcut değil"]:
|
| 514 |
-
system_messages.append({
|
| 515 |
-
"role": "system",
|
| 516 |
-
"content": "🏪 MAĞAZA STOK BİLGİLERİ: Sorduğunuz ürün hiçbir mağazada mevcut değil."
|
| 517 |
-
})
|
| 518 |
-
print(f"DEBUG - Product not available in any store")
|
| 519 |
-
else:
|
| 520 |
-
print(f"DEBUG - No warehouse stock data available")
|
| 521 |
-
|
| 522 |
messages = system_messages + history + [{"role": "user", "content": user_message}]
|
| 523 |
-
|
|
|
|
|
|
|
| 524 |
payload = {
|
| 525 |
"model": "gpt-4.1",
|
| 526 |
"messages": messages,
|
|
@@ -535,14 +386,11 @@ def chatbot_fn(user_message, history, image=None):
|
|
| 535 |
"Content-Type": "application/json",
|
| 536 |
"Authorization": f"Bearer {OPENAI_API_KEY}"
|
| 537 |
}
|
| 538 |
-
|
| 539 |
response = requests.post(API_URL, headers=headers, json=payload, stream=True)
|
| 540 |
if response.status_code != 200:
|
| 541 |
yield "Bir hata oluştu."
|
| 542 |
return
|
| 543 |
-
|
| 544 |
partial_response = ""
|
| 545 |
-
|
| 546 |
for chunk in response.iter_lines():
|
| 547 |
if not chunk:
|
| 548 |
continue
|
|
@@ -559,46 +407,49 @@ def chatbot_fn(user_message, history, image=None):
|
|
| 559 |
print(f"JSON parse hatası: {e} - Chunk: {chunk_str}")
|
| 560 |
elif chunk_str == "data: [DONE]":
|
| 561 |
break
|
| 562 |
-
|
| 563 |
final_response = extract_product_info_for_gallery(partial_response)
|
| 564 |
yield final_response
|
| 565 |
-
|
| 566 |
try:
|
| 567 |
with file_lock:
|
| 568 |
with open(LOG_FILE, 'a', encoding='utf-8') as f:
|
| 569 |
f.write(f"Bot: {partial_response}\n")
|
| 570 |
except Exception as e:
|
| 571 |
print(f"Dosya yazma hatası (Bot): {e}")
|
| 572 |
-
|
| 573 |
with history_lock:
|
| 574 |
global_chat_history.append({"role": "user", "content": user_message})
|
| 575 |
global_chat_history.append({"role": "assistant", "content": partial_response})
|
| 576 |
|
| 577 |
-
# Slow echo (test için)
|
| 578 |
def slow_echo(message, history):
|
| 579 |
for i in range(len(message)):
|
| 580 |
time.sleep(0.05)
|
| 581 |
yield "You typed: " + message[: i + 1]
|
| 582 |
|
| 583 |
-
# Kullanım modu
|
| 584 |
USE_SLOW_ECHO = False
|
| 585 |
chat_fn = slow_echo if USE_SLOW_ECHO else chatbot_fn
|
| 586 |
|
| 587 |
-
if not
|
| 588 |
-
|
| 589 |
-
|
| 590 |
-
|
| 591 |
-
|
| 592 |
-
|
| 593 |
-
|
| 594 |
-
|
| 595 |
-
|
| 596 |
-
|
| 597 |
-
|
| 598 |
-
|
| 599 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 600 |
custom_css = """
|
| 601 |
-
/* temel stiller (kısaltılmış) */
|
| 602 |
.gradio-container, .gradio-container * { font-family: 'Segoe UI', 'SF Pro Text', 'Roboto', -apple-system, BlinkMacSystemFont, 'Helvetica Neue', Arial, sans-serif !important; font-size: 0.6rem !important; }
|
| 603 |
"""
|
| 604 |
|
|
@@ -610,15 +461,8 @@ def enhanced_chatbot_fn(message, history, image):
|
|
| 610 |
with gr.Blocks(css=custom_css, theme="soft", title="Trek Asistanı", head=storage_js) as demo:
|
| 611 |
gr.Markdown("# 🚲 Trek Asistanı AI")
|
| 612 |
gr.Markdown("**Akıllı özellikler:** Ürün karşılaştırması ve detaylı ürün bilgileri sunuyorum.")
|
| 613 |
-
|
| 614 |
chatbot = gr.Chatbot(height=600, elem_id="chatbot", show_label=False, type="messages")
|
| 615 |
-
|
| 616 |
-
msg = gr.Textbox(
|
| 617 |
-
placeholder="Trek bisikletleri hakkında soru sorun...",
|
| 618 |
-
show_label=False,
|
| 619 |
-
elem_id="msg-input"
|
| 620 |
-
)
|
| 621 |
-
|
| 622 |
def respond(message, chat_history):
|
| 623 |
if not message.strip():
|
| 624 |
return "", chat_history
|
|
@@ -626,11 +470,9 @@ with gr.Blocks(css=custom_css, theme="soft", title="Trek Asistanı", head=storag
|
|
| 626 |
chat_history = []
|
| 627 |
chat_history.append({"role": "user", "content": message})
|
| 628 |
yield "", chat_history
|
| 629 |
-
|
| 630 |
formatted_history = []
|
| 631 |
for m in chat_history[:-1]:
|
| 632 |
formatted_history.append(m)
|
| 633 |
-
|
| 634 |
try:
|
| 635 |
response_generator = chatbot_fn(message, formatted_history, None)
|
| 636 |
response = ""
|
|
@@ -641,13 +483,11 @@ with gr.Blocks(css=custom_css, theme="soft", title="Trek Asistanı", head=storag
|
|
| 641 |
else:
|
| 642 |
chat_history[-1]["content"] = response
|
| 643 |
yield "", chat_history
|
| 644 |
-
|
| 645 |
try:
|
| 646 |
add_conversation(message, response)
|
| 647 |
-
save_conversations_json()
|
| 648 |
except Exception as e:
|
| 649 |
print(f"Error saving conversation: {e}")
|
| 650 |
-
|
| 651 |
except Exception as e:
|
| 652 |
error_msg = f"Üzgünüm, bir hata oluştu: {str(e)}"
|
| 653 |
print(f"Chat error: {e}")
|
|
@@ -656,18 +496,14 @@ with gr.Blocks(css=custom_css, theme="soft", title="Trek Asistanı", head=storag
|
|
| 656 |
else:
|
| 657 |
chat_history[-1]["content"] = error_msg
|
| 658 |
yield "", chat_history
|
| 659 |
-
|
| 660 |
msg.submit(respond, [msg, chatbot], [msg, chatbot], show_progress=True)
|
| 661 |
-
|
| 662 |
with gr.Accordion("📊 Konuşma Geçmişi (JSON)", open=False):
|
| 663 |
with gr.Row():
|
| 664 |
refresh_json_btn = gr.Button("🔄 Yenile", scale=1)
|
| 665 |
download_json_btn = gr.Button("💾 JSON İndir", scale=1)
|
| 666 |
view_dashboard_btn = gr.Button("📈 Dashboard'u Aç", scale=1)
|
| 667 |
-
|
| 668 |
json_display = gr.JSON(label="Konuşmalar", elem_id="json_viewer")
|
| 669 |
download_file = gr.File(label="İndir", visible=False)
|
| 670 |
-
|
| 671 |
def get_conversations_json():
|
| 672 |
from conversation_tracker import load_conversations
|
| 673 |
convs = load_conversations()
|
|
@@ -675,11 +511,9 @@ with gr.Blocks(css=custom_css, theme="soft", title="Trek Asistanı", head=storag
|
|
| 675 |
with open("temp_conversations.json", "w", encoding="utf-8") as f:
|
| 676 |
json_module.dump(convs, f, ensure_ascii=False, indent=2)
|
| 677 |
return convs
|
| 678 |
-
|
| 679 |
def download_conversations():
|
| 680 |
get_conversations_json()
|
| 681 |
return gr.update(visible=True, value="temp_conversations.json")
|
| 682 |
-
|
| 683 |
def open_dashboard():
|
| 684 |
return gr.HTML("""
|
| 685 |
<div style='padding: 20px; background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); border-radius: 10px; color: white;'>
|
|
@@ -690,24 +524,16 @@ with gr.Blocks(css=custom_css, theme="soft", title="Trek Asistanı", head=storag
|
|
| 690 |
</ol>
|
| 691 |
</div>
|
| 692 |
""")
|
| 693 |
-
|
| 694 |
refresh_json_btn.click(get_conversations_json, outputs=json_display)
|
| 695 |
download_conversations_btn = download_json_btn
|
| 696 |
download_conversations_btn.click(download_conversations, outputs=download_file)
|
| 697 |
view_dashboard_btn.click(open_dashboard, outputs=json_display)
|
| 698 |
demo.load(get_conversations_json, outputs=json_display)
|
| 699 |
|
| 700 |
-
# -------------------------
|
| 701 |
-
# CORS açık API + Gradio mount
|
| 702 |
-
# -------------------------
|
| 703 |
-
from fastapi import FastAPI, Response
|
| 704 |
-
from fastapi.middleware.cors import CORSMiddleware
|
| 705 |
-
import uvicorn
|
| 706 |
-
|
| 707 |
api = FastAPI()
|
| 708 |
api.add_middleware(
|
| 709 |
CORSMiddleware,
|
| 710 |
-
allow_origins=["*"],
|
| 711 |
allow_credentials=False,
|
| 712 |
allow_methods=["GET"],
|
| 713 |
allow_headers=["*"],
|
|
@@ -732,7 +558,4 @@ def api_public_conversations():
|
|
| 732 |
body, code = _read_json_safely("public/conversations.json")
|
| 733 |
return Response(content=body, media_type="application/json", status_code=code)
|
| 734 |
|
| 735 |
-
# Gradio arayüzünü FastAPI altına mount et
|
| 736 |
app = gr.mount_gradio_app(api, demo, path="/")
|
| 737 |
-
|
| 738 |
-
|
|
|
|
| 6 |
import schedule
|
| 7 |
import time
|
| 8 |
import threading
|
|
|
|
| 9 |
import warnings
|
|
|
|
| 10 |
from docx import Document
|
|
|
|
| 11 |
from google.oauth2.service_account import Credentials
|
| 12 |
from googleapiclient.discovery import build
|
| 13 |
from googleapiclient.http import MediaIoBaseDownload
|
| 14 |
import io
|
| 15 |
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
|
|
|
| 16 |
from fastapi import FastAPI, Response
|
| 17 |
from fastapi.middleware.cors import CORSMiddleware
|
| 18 |
+
warnings.simplefilter('ignore', InsecureRequestWarning)
|
|
|
|
|
|
|
| 19 |
from prompts import get_prompt_content_only
|
| 20 |
+
from enhanced_features import initialize_enhanced_features, process_image_message, handle_comparison_request
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 21 |
from image_renderer import extract_product_info_for_gallery, format_message_with_images
|
|
|
|
|
|
|
| 22 |
from conversation_tracker import add_conversation
|
| 23 |
|
|
|
|
| 24 |
def save_conversations_json():
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 25 |
try:
|
| 26 |
from conversation_tracker import load_conversations
|
| 27 |
convs = load_conversations()
|
|
|
|
| 28 |
with open("conversations.json", "w", encoding="utf-8") as f:
|
| 29 |
json.dump(convs, f, ensure_ascii=False, indent=2)
|
|
|
|
| 30 |
os.makedirs("public", exist_ok=True)
|
| 31 |
with open("public/conversations.json", "w", encoding="utf-8") as f:
|
| 32 |
json.dump(convs, f, ensure_ascii=False, indent=2)
|
|
|
|
| 33 |
except Exception as e:
|
| 34 |
print(f"conversations.json yazma hatası: {e}")
|
| 35 |
|
|
|
|
| 36 |
try:
|
| 37 |
+
from improved_chatbot import ImprovedChatbot
|
| 38 |
+
USE_IMPROVED_SEARCH = True
|
| 39 |
+
except ImportError as e:
|
| 40 |
+
print(f"DEBUG - Improved chatbot not available: {e}, using basic search")
|
| 41 |
+
USE_IMPROVED_SEARCH = False
|
| 42 |
+
|
| 43 |
+
LOG_FILE = '/data/chat_logs.txt' if os.path.exists('/data') else 'chat_logs.txt'
|
| 44 |
+
print(f"Dosya yolu: {os.path.abspath(LOG_FILE)}")
|
| 45 |
+
API_URL = "https://api.openai.com/v1/chat/completions"
|
| 46 |
+
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
|
| 47 |
+
if not OPENAI_API_KEY:
|
| 48 |
+
print("Hata: OPENAI_API_KEY çevre değişkeni ayarlanmamış!")
|
| 49 |
+
|
| 50 |
+
GOOGLE_CREDENTIALS_PATH = os.getenv("GOOGLE_CREDENTIALS_PATH")
|
| 51 |
+
GOOGLE_FOLDER_ID = "1bE8aMj8-eFGftjMPOF8bKQJAhfHa0BN8"
|
| 52 |
+
file_lock = threading.Lock()
|
| 53 |
+
history_lock = threading.Lock()
|
| 54 |
+
global_chat_history = []
|
| 55 |
+
document_content = ""
|
| 56 |
+
products = []
|
| 57 |
+
improved_bot = None
|
| 58 |
|
| 59 |
def get_warehouse_stock(product_name):
|
| 60 |
+
try:
|
| 61 |
+
from smart_warehouse_with_price import get_warehouse_stock_smart_with_price
|
| 62 |
+
s = get_warehouse_stock_smart_with_price(product_name)
|
| 63 |
+
if s:
|
| 64 |
+
return s
|
| 65 |
+
except Exception:
|
| 66 |
+
try:
|
| 67 |
+
from smart_warehouse import get_warehouse_stock_smart
|
| 68 |
+
s = get_warehouse_stock_smart(product_name)
|
| 69 |
+
if s:
|
| 70 |
+
return s
|
| 71 |
+
except Exception:
|
| 72 |
+
pass
|
| 73 |
return get_warehouse_stock_old(product_name)
|
| 74 |
|
|
|
|
| 75 |
def get_warehouse_stock_old(product_name):
|
|
|
|
| 76 |
try:
|
| 77 |
import re
|
|
|
|
|
|
|
| 78 |
xml_text = None
|
| 79 |
for attempt in range(3):
|
| 80 |
try:
|
| 81 |
url = 'https://video.trek-turkey.com/bizimhesap-warehouse-xml-b2b-api-v2.php'
|
| 82 |
+
timeout_val = 10 + (attempt * 5)
|
| 83 |
response = requests.get(url, verify=False, timeout=timeout_val)
|
| 84 |
xml_text = response.text
|
| 85 |
break
|
|
|
|
| 88 |
return None
|
| 89 |
except Exception:
|
| 90 |
return None
|
|
|
|
|
|
|
| 91 |
def normalize(text):
|
| 92 |
tr_map = {'ı': 'i', 'ğ': 'g', 'ü': 'u', 'ş': 's', 'ö': 'o', 'ç': 'c', 'İ': 'i', 'I': 'i'}
|
| 93 |
text = text.lower()
|
| 94 |
for tr, en in tr_map.items():
|
| 95 |
text = text.replace(tr, en)
|
| 96 |
return text
|
|
|
|
|
|
|
| 97 |
query = normalize(product_name.strip()).replace('(2026)', '').replace('(2025)', '').strip()
|
| 98 |
words = query.split()
|
|
|
|
|
|
|
| 99 |
sizes = ['s', 'm', 'l', 'xl', 'xs', 'xxl', 'ml']
|
| 100 |
size = next((w for w in words if w in sizes), None)
|
|
|
|
|
|
|
| 101 |
product_words = []
|
| 102 |
+
if len(words) > 2 or any(w.isdigit() for w in words):
|
|
|
|
|
|
|
|
|
|
|
|
|
| 103 |
for word in words:
|
| 104 |
if word in sizes:
|
| 105 |
continue
|
|
|
|
| 119 |
if consonants <= 2:
|
| 120 |
continue
|
| 121 |
product_words.append(word)
|
| 122 |
+
import re as _re
|
| 123 |
+
all_products = _re.findall(r'<Product>(.*?)</Product>', xml_text, _re.DOTALL)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 124 |
best_match = None
|
| 125 |
for product_block in all_products:
|
| 126 |
+
name_match = _re.search(r'<ProductName><!\[CDATA\[(.*?)\]\]></ProductName>', product_block)
|
| 127 |
if not name_match:
|
| 128 |
continue
|
|
|
|
| 129 |
product_name_in_xml = name_match.group(1)
|
| 130 |
normalized_xml_name = normalize(product_name_in_xml)
|
|
|
|
| 131 |
match = True
|
| 132 |
for word in product_words:
|
| 133 |
if word not in normalized_xml_name:
|
| 134 |
if not (word.isdigit() and any(f"{prev}{word}" in normalized_xml_name or f"{prev} {word}" in normalized_xml_name for prev in product_words if not prev.isdigit())):
|
| 135 |
match = False
|
| 136 |
break
|
|
|
|
| 137 |
if match:
|
| 138 |
if size:
|
| 139 |
+
variant_match = _re.search(r'<ProductVariant><!\[CDATA\[(.*?)\]\]></ProductVariant>', product_block)
|
| 140 |
if variant_match:
|
| 141 |
variant = variant_match.group(1)
|
| 142 |
if variant.upper().startswith(f'{size.upper()}-'):
|
|
|
|
| 143 |
best_match = product_block
|
| 144 |
break
|
| 145 |
else:
|
| 146 |
best_match = product_block
|
| 147 |
break
|
|
|
|
| 148 |
if best_match:
|
| 149 |
warehouse_info = []
|
| 150 |
+
warehouses = _re.findall(r'<Warehouse>.*?<Name><!\[CDATA\[(.*?)\]\]></Name>.*?<Stock>(.*?)</Stock>.*?</Warehouse>', best_match, _re.DOTALL)
|
|
|
|
|
|
|
| 151 |
for wh_name, wh_stock in warehouses:
|
| 152 |
try:
|
| 153 |
stock = int(wh_stock.strip())
|
|
|
|
| 163 |
else:
|
| 164 |
display = wh_name
|
| 165 |
warehouse_info.append(f"{display}: Mevcut")
|
| 166 |
+
except Exception:
|
| 167 |
pass
|
|
|
|
| 168 |
return warehouse_info if warehouse_info else ["Hiçbir mağazada mevcut değil"]
|
| 169 |
else:
|
|
|
|
| 170 |
return ["Hiçbir mağazada mevcut değil"]
|
|
|
|
| 171 |
except Exception as e:
|
| 172 |
print(f"Warehouse error: {e}")
|
| 173 |
return None
|
| 174 |
|
| 175 |
+
def load_products():
    """Download the Trek product XML feed and build the in-memory product list.

    Returns a list of ``(first_word, product_info, full_name)`` tuples where
    ``product_info`` is ``[stock_text, price, url, price_eft, price_rebate,
    price_rebate_money_order, image_url]`` for in-stock items and just
    ``[stock_text]`` otherwise.  Best effort: any network/parse failure is
    printed and whatever was collected so far is returned.
    """

    def _display_price(raw):
        """Round a price string to a coarse display figure.

        The rounding step grows with magnitude (10 / 100 / 1000 / 5000) so
        quoted prices look like list prices.  Non-numeric input is returned
        unchanged; empty input yields "".
        """
        if not raw:
            return ""
        try:
            value = float(raw)
        except Exception:
            return raw
        if value > 200000:
            step = 5000
        elif value > 30000:
            step = 1000
        elif value > 10000:
            step = 100
        else:
            step = 10
        return str(round(value / step) * step)

    out = []
    try:
        url = 'https://www.trekbisiklet.com.tr/output/8582384479'
        # NOTE(review): verify=False disables TLS verification for this feed
        # host — confirm this is intentional for the deployment environment.
        response = requests.get(url, verify=False, timeout=30)
        if response.status_code != 200 or not response.content:
            return out
        root = ET.fromstring(response.content)
        for item in root.findall('item'):
            rootlabel_elem = item.find('rootlabel')
            stock_elem = item.find('stockAmount')
            if rootlabel_elem is None or stock_elem is None or rootlabel_elem.text is None:
                continue
            name_words = rootlabel_elem.text.lower().split()
            if not name_words:
                continue
            name = name_words[0]
            full_name = ' '.join(name_words)
            # Bug fix: the stock amount used to be compared lexicographically
            # (`text > '0'`), which misclassifies values such as "00" or
            # arbitrary non-numeric text.  Compare numerically and treat
            # unparsable text as out of stock.
            try:
                in_stock = float(stock_elem.text.strip()) > 0
            except (TypeError, ValueError):
                in_stock = False
            stock_amount = "stokta" if in_stock else "stokta değil"
            if stock_amount == "stokta":
                price_elem = item.find('priceTaxWithCur')
                price_str = price_elem.text if price_elem is not None and price_elem.text else "Fiyat bilgisi yok"
                price_eft_elem = item.find('priceEft')
                price_eft_str = price_eft_elem.text if price_eft_elem is not None and price_eft_elem.text else ""
                price_rebate_elem = item.find('priceRebateWithTax')
                price_rebate_str = price_rebate_elem.text if price_rebate_elem is not None and price_rebate_elem.text else ""
                price_rebate_money_order_elem = item.find('priceRebateWithMoneyOrderWithTax')
                price_rebate_money_order_str = price_rebate_money_order_elem.text if price_rebate_money_order_elem is not None and price_rebate_money_order_elem.text else ""
                # All four price fields share one rounding rule; this was
                # previously duplicated inline four times.
                price = _display_price(price_str)
                price_eft = _display_price(price_eft_str)
                price_rebate = _display_price(price_rebate_str)
                price_rebate_money_order = _display_price(price_rebate_money_order_str)
                product_url_elem = item.find('productLink')
                product_url = product_url_elem.text if product_url_elem is not None and product_url_elem.text else ""
                product_info = [stock_amount, price, product_url, price_eft, price_rebate, price_rebate_money_order]
                image_elem = item.find('picture1Path')
                product_info.append(image_elem.text if image_elem is not None and image_elem.text else "")
            else:
                product_info = [stock_amount]
            out.append((name, product_info, full_name))
    except Exception as e:
        print(f"Ürün yükleme hatası: {e}")
    return out
|
| 274 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 275 |
def download_documents_from_drive():
    """Pull every .docx file from the configured Drive folder and cache its text.

    Stores the concatenated paragraph text of all documents in the
    module-global ``document_content``.  Best effort: any failure is printed
    and the existing cache is left untouched.
    """
    global document_content
    if not GOOGLE_CREDENTIALS_PATH:
        print("Google credentials dosyası bulunamadı.")
        return
    try:
        credentials = Credentials.from_service_account_file(GOOGLE_CREDENTIALS_PATH)
        service = build('drive', 'v3', credentials=credentials)
        listing = service.files().list(q=f"'{GOOGLE_FOLDER_ID}' in parents", fields="files(id, name, mimeType)").execute()
        sections = []
        for entry in listing.get('files', []):
            # Only Word (.docx) documents are supported; skip everything else.
            if entry.get('mimeType') != 'application/vnd.openxmlformats-officedocument.wordprocessingml.document':
                continue
            buffer = io.BytesIO()
            downloader = MediaIoBaseDownload(buffer, service.files().get_media(fileId=entry['id']))
            done = False
            while not done:
                _, done = downloader.next_chunk()
            buffer.seek(0)
            doc = Document(buffer)
            # One header line per file, followed by every non-blank paragraph.
            lines = [f"\n=== {entry['name']} ==="]
            lines.extend(p.text for p in doc.paragraphs if p.text.strip())
            sections.append("\n".join(lines) + "\n")
        document_content = "\n".join(sections)
    except Exception as e:
        print(f"Google Drive'dan döküman indirme hatası: {e}")
|
| 304 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 305 |
def clear_log_file():
    """Reset the chat log to a single marker line (scheduled housekeeping)."""
    try:
        if not os.path.exists(LOG_FILE):
            return
        # Hold the shared file lock so a concurrent chat write cannot interleave.
        with file_lock, open(LOG_FILE, 'w', encoding='utf-8') as handle:
            handle.write("Log dosyası temizlendi.\n")
    except Exception as err:
        print(f"Log dosyası temizleme hatası: {err}")
|
| 313 |
|
|
|
|
| 314 |
def run_scheduler(chat_history):
    """Blocking scheduler loop — run this in a daemon thread.

    Registers a daily 03:00 job that clears the log file, then polls the
    `schedule` registry once a minute forever.  `chat_history` is accepted to
    match the caller's signature but is not used here.
    """
    schedule.every().day.at("03:00").do(clear_log_file)
    while True:
        schedule.run_pending()
        time.sleep(60)  # one-minute resolution is plenty for a daily job
|
| 319 |
|
|
|
|
| 320 |
def chatbot_fn(user_message, history, image=None):
|
| 321 |
if history is None:
|
| 322 |
history = []
|
|
|
|
| 323 |
warehouse_stock_data = None
|
|
|
|
| 324 |
try:
|
| 325 |
warehouse_stock_data = get_warehouse_stock(user_message)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 326 |
except Exception as e:
|
| 327 |
print(f"DEBUG - Warehouse stock error at start: {e}")
|
|
|
|
| 328 |
try:
|
| 329 |
if image is not None:
|
| 330 |
user_message = process_image_message(image, user_message)
|
|
|
|
| 331 |
comparison_result = handle_comparison_request(user_message)
|
| 332 |
if comparison_result:
|
| 333 |
yield comparison_result
|
| 334 |
return
|
| 335 |
except Exception as e:
|
| 336 |
print(f"Enhanced features error: {e}")
|
|
|
|
| 337 |
try:
|
| 338 |
with file_lock:
|
| 339 |
with open(LOG_FILE, 'a', encoding='utf-8') as f:
|
| 340 |
f.write(f"User: {user_message}\n")
|
| 341 |
except Exception as e:
|
| 342 |
print(f"Dosya yazma hatası (Kullanıcı): {e}")
|
|
|
|
| 343 |
system_messages = get_prompt_content_only()
|
|
|
|
| 344 |
if document_content:
|
| 345 |
system_messages.append({"role": "system", "content": f"Dökümanlardan gelen bilgiler: {document_content}"})
|
|
|
|
| 346 |
product_found_improved = False
|
| 347 |
if USE_IMPROVED_SEARCH and improved_bot:
|
| 348 |
try:
|
|
|
|
| 354 |
for store_info in warehouse_stock_data:
|
| 355 |
warehouse_info += f"• {store_info}\n"
|
| 356 |
enhanced_response += warehouse_info
|
|
|
|
| 357 |
elif warehouse_stock_data == ["Hiçbir mağazada mevcut değil"]:
|
| 358 |
enhanced_response += f"\n\n🏪 MAĞAZA STOK BİLGİLERİ: Hiçbir mağazada mevcut değil"
|
| 359 |
+
system_messages.append({"role": "system", "content": f"ÜRÜN BİLGİSİ:\n{enhanced_response}\n\nBu bilgileri kullanarak kullanıcıya yardımcı ol."})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 360 |
product_found_improved = True
|
| 361 |
except Exception as e:
|
| 362 |
print(f"Improved search error: {e}")
|
|
|
|
| 363 |
if not product_found_improved:
|
|
|
|
| 364 |
if warehouse_stock_data and warehouse_stock_data != ["Hiçbir mağazada mevcut değil"]:
|
| 365 |
warehouse_info = f"🏪 MAĞAZA STOK BİLGİLERİ:\n"
|
| 366 |
for store_info in warehouse_stock_data:
|
| 367 |
warehouse_info += f"• {store_info}\n"
|
| 368 |
+
system_messages.append({"role": "system", "content": f"GÜNCEL STOK DURUMU:\n{warehouse_info}\n\nBu bilgileri kullanarak kullanıcıya hangi mağazada stok olduğunu söyle."})
|
|
|
|
|
|
|
|
|
|
|
|
|
| 369 |
elif warehouse_stock_data == ["Hiçbir mağazada mevcut değil"]:
|
| 370 |
+
system_messages.append({"role": "system", "content": "🏪 MAĞAZA STOK BİLGİLERİ: Sorduğunuz ürün hiçbir mağazada mevcut değil."})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 371 |
messages = system_messages + history + [{"role": "user", "content": user_message}]
|
| 372 |
+
if not OPENAI_API_KEY:
|
| 373 |
+
yield "API anahtarı eksik."
|
| 374 |
+
return
|
| 375 |
payload = {
|
| 376 |
"model": "gpt-4.1",
|
| 377 |
"messages": messages,
|
|
|
|
| 386 |
"Content-Type": "application/json",
|
| 387 |
"Authorization": f"Bearer {OPENAI_API_KEY}"
|
| 388 |
}
|
|
|
|
| 389 |
response = requests.post(API_URL, headers=headers, json=payload, stream=True)
|
| 390 |
if response.status_code != 200:
|
| 391 |
yield "Bir hata oluştu."
|
| 392 |
return
|
|
|
|
| 393 |
partial_response = ""
|
|
|
|
| 394 |
for chunk in response.iter_lines():
|
| 395 |
if not chunk:
|
| 396 |
continue
|
|
|
|
| 407 |
print(f"JSON parse hatası: {e} - Chunk: {chunk_str}")
|
| 408 |
elif chunk_str == "data: [DONE]":
|
| 409 |
break
|
|
|
|
| 410 |
final_response = extract_product_info_for_gallery(partial_response)
|
| 411 |
yield final_response
|
|
|
|
| 412 |
try:
|
| 413 |
with file_lock:
|
| 414 |
with open(LOG_FILE, 'a', encoding='utf-8') as f:
|
| 415 |
f.write(f"Bot: {partial_response}\n")
|
| 416 |
except Exception as e:
|
| 417 |
print(f"Dosya yazma hatası (Bot): {e}")
|
|
|
|
| 418 |
with history_lock:
|
| 419 |
global_chat_history.append({"role": "user", "content": user_message})
|
| 420 |
global_chat_history.append({"role": "assistant", "content": partial_response})
|
| 421 |
|
|
|
|
| 422 |
def slow_echo(message, history):
    """Debug chat handler: stream the user's message back one character at a time."""
    typed = ""
    for ch in message:
        time.sleep(0.05)  # simulate typing latency
        typed += ch
        yield "You typed: " + typed
|
| 426 |
|
|
|
|
| 427 |
# Debug switch: when True the UI echoes input back instead of calling the model.
USE_SLOW_ECHO = False
chat_fn = slow_echo if USE_SLOW_ECHO else chatbot_fn
|
| 429 |
|
| 430 |
+
# One-time process initialization.  The globals() guard keeps a module
# re-execution (e.g. a hot reload) from repeating the expensive setup below.
if 'APP_INIT_DONE' not in globals():
    APP_INIT_DONE = True
    # Fetch the product feed once at startup and hand it to the helpers.
    products = load_products()
    print(f"Toplam {len(products)} ürün yüklendi.")
    initialize_enhanced_features(OPENAI_API_KEY, products)
    save_conversations_json()
    if GOOGLE_CREDENTIALS_PATH:
        # Pull the Drive documents in the background so startup isn't blocked.
        document_thread = threading.Thread(target=download_documents_from_drive, daemon=True)
        document_thread.start()
    else:
        print("Google credentials dosyası bulunamadı.")
    if not USE_SLOW_ECHO:
        # Daemon thread running the daily log-cleanup scheduler.
        scheduler_thread = threading.Thread(target=run_scheduler, args=(global_chat_history,), daemon=True)
        scheduler_thread.start()
    if USE_IMPROVED_SEARCH:
        try:
            improved_bot = ImprovedChatbot(products)
            print("Improved product search initialized successfully")
        except Exception as e:
            # Fall back to the legacy search path if initialization fails.
            print(f"Failed to initialize improved search: {e}")
            USE_IMPROVED_SEARCH = False
|
| 451 |
+
|
| 452 |
custom_css = """
|
|
|
|
| 453 |
.gradio-container, .gradio-container * { font-family: 'Segoe UI', 'SF Pro Text', 'Roboto', -apple-system, BlinkMacSystemFont, 'Helvetica Neue', Arial, sans-serif !important; font-size: 0.6rem !important; }
|
| 454 |
"""
|
| 455 |
|
|
|
|
| 461 |
with gr.Blocks(css=custom_css, theme="soft", title="Trek Asistanı", head=storage_js) as demo:
|
| 462 |
gr.Markdown("# 🚲 Trek Asistanı AI")
|
| 463 |
gr.Markdown("**Akıllı özellikler:** Ürün karşılaştırması ve detaylı ürün bilgileri sunuyorum.")
|
|
|
|
| 464 |
chatbot = gr.Chatbot(height=600, elem_id="chatbot", show_label=False, type="messages")
|
| 465 |
+
msg = gr.Textbox(placeholder="Trek bisikletleri hakkında soru sorun...", show_label=False, elem_id="msg-input")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 466 |
def respond(message, chat_history):
|
| 467 |
if not message.strip():
|
| 468 |
return "", chat_history
|
|
|
|
| 470 |
chat_history = []
|
| 471 |
chat_history.append({"role": "user", "content": message})
|
| 472 |
yield "", chat_history
|
|
|
|
| 473 |
formatted_history = []
|
| 474 |
for m in chat_history[:-1]:
|
| 475 |
formatted_history.append(m)
|
|
|
|
| 476 |
try:
|
| 477 |
response_generator = chatbot_fn(message, formatted_history, None)
|
| 478 |
response = ""
|
|
|
|
| 483 |
else:
|
| 484 |
chat_history[-1]["content"] = response
|
| 485 |
yield "", chat_history
|
|
|
|
| 486 |
try:
|
| 487 |
add_conversation(message, response)
|
| 488 |
+
save_conversations_json()
|
| 489 |
except Exception as e:
|
| 490 |
print(f"Error saving conversation: {e}")
|
|
|
|
| 491 |
except Exception as e:
|
| 492 |
error_msg = f"Üzgünüm, bir hata oluştu: {str(e)}"
|
| 493 |
print(f"Chat error: {e}")
|
|
|
|
| 496 |
else:
|
| 497 |
chat_history[-1]["content"] = error_msg
|
| 498 |
yield "", chat_history
|
|
|
|
| 499 |
msg.submit(respond, [msg, chatbot], [msg, chatbot], show_progress=True)
|
|
|
|
| 500 |
with gr.Accordion("📊 Konuşma Geçmişi (JSON)", open=False):
|
| 501 |
with gr.Row():
|
| 502 |
refresh_json_btn = gr.Button("🔄 Yenile", scale=1)
|
| 503 |
download_json_btn = gr.Button("💾 JSON İndir", scale=1)
|
| 504 |
view_dashboard_btn = gr.Button("📈 Dashboard'u Aç", scale=1)
|
|
|
|
| 505 |
json_display = gr.JSON(label="Konuşmalar", elem_id="json_viewer")
|
| 506 |
download_file = gr.File(label="İndir", visible=False)
|
|
|
|
| 507 |
def get_conversations_json():
|
| 508 |
from conversation_tracker import load_conversations
|
| 509 |
convs = load_conversations()
|
|
|
|
| 511 |
with open("temp_conversations.json", "w", encoding="utf-8") as f:
|
| 512 |
json_module.dump(convs, f, ensure_ascii=False, indent=2)
|
| 513 |
return convs
|
|
|
|
| 514 |
def download_conversations():
|
| 515 |
get_conversations_json()
|
| 516 |
return gr.update(visible=True, value="temp_conversations.json")
|
|
|
|
| 517 |
def open_dashboard():
|
| 518 |
return gr.HTML("""
|
| 519 |
<div style='padding: 20px; background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); border-radius: 10px; color: white;'>
|
|
|
|
| 524 |
</ol>
|
| 525 |
</div>
|
| 526 |
""")
|
|
|
|
| 527 |
refresh_json_btn.click(get_conversations_json, outputs=json_display)
|
| 528 |
download_conversations_btn = download_json_btn
|
| 529 |
download_conversations_btn.click(download_conversations, outputs=download_file)
|
| 530 |
view_dashboard_btn.click(open_dashboard, outputs=json_display)
|
| 531 |
demo.load(get_conversations_json, outputs=json_display)
|
| 532 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 533 |
api = FastAPI()
|
| 534 |
api.add_middleware(
|
| 535 |
CORSMiddleware,
|
| 536 |
+
allow_origins=["*"],
|
| 537 |
allow_credentials=False,
|
| 538 |
allow_methods=["GET"],
|
| 539 |
allow_headers=["*"],
|
|
|
|
| 558 |
body, code = _read_json_safely("public/conversations.json")
|
| 559 |
return Response(content=body, media_type="application/json", status_code=code)
|
| 560 |
|
|
|
|
| 561 |
# Final ASGI app: the FastAPI instance (JSON/dashboard endpoints) with the
# Gradio UI mounted at the root path.
app = gr.mount_gradio_app(api, demo, path="/")
|
|
|
|
|
|