SamiKoen
Model adi gpt-5.5 olarak duzeltildi (chat-latest suffix'i kaldirildi); smart_warehouse dosyalarindan temperature/max_tokens kaldirildi
56e6173 | """Smart warehouse stock finder with price and link information""" | |
| import requests | |
| import re | |
| import os | |
| import json | |
| import xml.etree.ElementTree as ET | |
| import time | |
# Cache configuration - 2 hours (reduced from 12 hours for more accurate results)
CACHE_DURATION = 7200  # 2 hours, in seconds

# Module-level in-memory cache shared by every lookup in this file.
cache = {
    'warehouse_xml': {'data': None, 'time': 0},   # raw warehouse stock XML (text)
    'trek_xml': {'data': None, 'time': 0},        # raw Trek catalogue XML (bytes)
    'products_summary': {'data': None, 'time': 0},  # NOTE(review): written nowhere in this file — confirm still needed
    'search_results': {}  # Cache for specific searches, keyed by normalized query
}
def get_cached_trek_xml():
    """Return the Trek product XML (bytes), cached for CACHE_DURATION seconds.

    Returns None when the feed cannot be fetched (non-200 status or a
    request error); callers must handle a None result.  The previous
    docstring said "12-hour caching" — the actual duration is the shared
    CACHE_DURATION constant (2 hours).
    """
    current_time = time.time()
    entry = cache['trek_xml']
    # Serve from the cache while the entry is still fresh.
    if entry['data'] and (current_time - entry['time'] < CACHE_DURATION):
        return entry['data']
    try:
        url = 'https://www.trekbisiklet.com.tr/output/8582384479'
        # NOTE(review): verify=False disables TLS certificate validation —
        # confirm this is intentional for this endpoint.
        response = requests.get(url, verify=False, timeout=10)
        if response.status_code == 200:
            entry['data'] = response.content
            entry['time'] = current_time
            return response.content
        return None
    except Exception:
        # Best-effort fetch: any failure degrades to "no data" (a stale
        # cache entry is deliberately NOT reused past its TTL).
        return None
def apply_price_rounding(price_str):
    """Apply the tiered price rounding formula used in app.py.

    Rounds to the nearest 5000 above 200k, 1000 above 30k, 100 above 10k,
    and 10 otherwise.  Returns the input unchanged when it is empty/None
    or cannot be parsed as a number (e.g. already-formatted text).
    """
    if not price_str:
        return price_str
    try:
        price_float = float(price_str)
    except (TypeError, ValueError):
        # Non-numeric input passes through untouched (was a bare except).
        return price_str
    # (threshold, rounding step) tiers, checked from most expensive down.
    for threshold, step in ((200000, 5000), (30000, 1000), (10000, 100)):
        if price_float > threshold:
            return str(round(price_float / step) * step)
    return str(round(price_float / 10) * 10)
def get_product_price_and_link_by_sku(product_code):
    """Get price and link from the Trek XML by SKU (stockCode).

    Matching runs in three passes over the feed:
      Level 1: variants (isOptionOfAProduct == 1) matched on stockCode
      Level 2: main products (isOptionOfAProduct == 0) matched on stockCode
      Level 3: any item matched on rootProductStockCode (variant -> parent)

    Returns (rounded_price, link) or (None, None) when not found / on error.
    """
    def _matches(element, code):
        # True when the element carries text equal to the searched SKU.
        return element is not None and element.text and element.text.strip() == code

    def _price_and_link(item):
        # Extract (rounded price, link) from an <item>; None when either
        # field is missing so the caller keeps scanning.
        price_element = item.find('priceTaxWithCur')
        link_element = item.find('productLink')
        if price_element is not None and link_element is not None:
            return apply_price_rounding(price_element.text), link_element.text
        return None

    try:
        xml_content = get_cached_trek_xml()
        if not xml_content:
            return None, None
        if isinstance(xml_content, bytes):
            xml_content = xml_content.decode('utf-8')
        try:
            root = ET.fromstring(xml_content)
        except ET.ParseError:
            # Malformed feed: fall back to the regex-based lookup.
            return get_product_price_and_link_by_sku_regex(product_code)

        items = root.findall('.//item')
        # Levels 1 and 2: match on stockCode, variants before main products
        # (the three original copy-pasted loops collapsed into one).
        for option_flag in ('1', '0'):
            for item in items:
                if (item.findtext('isOptionOfAProduct') == option_flag and
                        _matches(item.find('stockCode'), product_code)):
                    found = _price_and_link(item)
                    if found:
                        return found
        # Level 3: variant-to-main mapping via rootProductStockCode.
        for item in items:
            if _matches(item.find('rootProductStockCode'), product_code):
                found = _price_and_link(item)
                if found:
                    return found
        return None, None
    except Exception:
        # Defensive: any unexpected error degrades to "not found".
        return None, None
def get_product_price_and_link_by_sku_regex(product_code):
    """Fallback regex method for SKU lookup if XML parsing fails.

    Mirrors the two stockCode levels of get_product_price_and_link_by_sku
    (variants first, then main products).  Returns (price, link) or
    (None, None).
    """
    try:
        xml_content = get_cached_trek_xml()
        if not xml_content:
            # BUGFIX: previously a None feed fell straight into re.search,
            # raising TypeError that the bare except silently swallowed.
            return None, None
        if isinstance(xml_content, bytes):
            xml_content = xml_content.decode('utf-8')
        escaped_code = re.escape(product_code)
        # Variants (isOptionOfAProduct=1) take precedence over main products.
        for option_flag in ('1', '0'):
            section_pattern = (
                rf'<isOptionOfAProduct>{option_flag}</isOptionOfAProduct>.*?'
                rf'<stockCode><!\[CDATA\[{escaped_code}\]\]></stockCode>.*?(?=<item>|$)'
            )
            section_match = re.search(section_pattern, xml_content, re.DOTALL)
            if section_match:
                section = section_match.group(0)
                # NOTE(review): field names aligned with the ET parser path
                # (priceTaxWithCur / productLink).  The old <price> /
                # <producturl> tags never appear in that path, so this
                # fallback could not have returned data — confirm against
                # the live feed.
                price_match = re.search(
                    r'<priceTaxWithCur><!\[CDATA\[(.*?)\]\]></priceTaxWithCur>', section)
                link_match = re.search(
                    r'<productLink><!\[CDATA\[(.*?)\]\]></productLink>', section)
                if price_match and link_match:
                    return apply_price_rounding(price_match.group(1)), link_match.group(1)
        return None, None
    except Exception:
        return None, None
def _format_price_tl(price_str):
    """Round a raw price string and format it Turkish-style, e.g. '12.345 TL'.

    Reuses apply_price_rounding (same thresholds as app.py) instead of the
    previously duplicated threshold ladder.  Falls back to '<raw> TL' when
    the price cannot be parsed as a number.
    """
    try:
        rounded = int(float(apply_price_rounding(price_str)))
        return f"{rounded:,}".replace(',', '.') + " TL"
    except (TypeError, ValueError):
        return f"{price_str} TL"


# Turkish -> ASCII fold.  Must be applied BEFORE lower(): 'İ'.lower() yields
# 'i' + COMBINING DOT ABOVE (U+0307), which breaks plain substring matching.
_TR_MAP = {
    'İ': 'i', 'I': 'i', 'ı': 'i',  # all I variations to i
    'Ğ': 'g', 'ğ': 'g',
    'Ü': 'u', 'ü': 'u',
    'Ş': 's', 'ş': 's',
    'Ö': 'o', 'ö': 'o',
    'Ç': 'c', 'ç': 'c',
}


def _tr_fold_lower(text):
    """Fold Turkish characters to ASCII, then lowercase."""
    for tr, en in _TR_MAP.items():
        text = text.replace(tr, en)
    return text.lower()


def get_product_price_and_link(product_name, variant=None):
    """Find the best name-match for *product_name* in the Trek website XML.

    Scores every <item> against the (Turkish-folded, year-stripped) query:
    exact match 100, prefix match 50, otherwise one point per query token
    found; a variant substring match adds 2.  Returns (formatted price,
    product link) for the highest-scoring item, or (None, None).
    """
    try:
        xml_content = get_cached_trek_xml()
        if not xml_content:
            return None, None
        root = ET.fromstring(xml_content)

        search_name = _tr_fold_lower(product_name)
        search_variant = _tr_fold_lower(variant) if variant else ""

        best_match = None
        best_score = 0
        # Strip a trailing "(2024)"-style year before comparing.
        clean_search = re.sub(r'\s*\(\d{4}\)\s*', '', search_name).strip()

        for item in root.findall('item'):
            rootlabel_elem = item.find('rootlabel')
            if rootlabel_elem is None or not rootlabel_elem.text:
                continue
            # BUGFIX: fold BEFORE lowercasing.  The old code lowercased
            # first, so an uppercase 'İ' in the feed decomposed into
            # 'i' + U+0307 and never matched the folded search string.
            item_name = _tr_fold_lower(rootlabel_elem.text)
            clean_item = re.sub(r'\s*\(\d{4}\)\s*', '', item_name).strip()

            score = 0
            if clean_search == clean_item:
                score += 100        # exact match wins outright
            elif clean_item.startswith(clean_search + " ") or clean_item == clean_search:
                score += 50         # item is "<search> <color/size>"
            else:
                # Partial match: one point per query token found in the item.
                for part in clean_search.split():
                    if part in clean_item:
                        score += 1
            if variant and search_variant in item_name:
                score += 2          # variant match is important
            if score > best_score:
                best_score = score
                best_match = item

        if best_match and best_score > 0:
            price_elem = best_match.find('priceTaxWithCur')
            price = price_elem.text if price_elem is not None and price_elem.text else None
            if price:
                price = _format_price_tl(price)
            # Field name is productLink, not productUrl!
            link_elem = best_match.find('productLink')
            link = link_elem.text if link_elem is not None and link_elem.text else None
            return price, link
        return None, None
    except Exception:
        return None, None
def get_cached_warehouse_xml():
    """Return the warehouse stock XML (text), cached for CACHE_DURATION seconds.

    Retries up to 3 times on timeout with a growing timeout (10s/15s/20s).
    Any other request error aborts immediately.  Returns None on failure.
    (The previous docstring said "12-hour caching"; the real TTL is the
    shared 2-hour CACHE_DURATION constant.)
    """
    current_time = time.time()
    entry = cache['warehouse_xml']
    if entry['data'] and (current_time - entry['time'] < CACHE_DURATION):
        return entry['data']
    # Loop-invariant endpoint hoisted out of the retry loop.
    url = 'https://video.trek-turkey.com/bizimhesap-warehouse-xml-b2b-api-v2.php'
    for attempt in range(3):
        try:
            timeout_val = 10 + (attempt * 5)
            # NOTE(review): verify=False disables TLS checks, and non-200
            # responses are still cached — confirm both are intentional.
            response = requests.get(url, verify=False, timeout=timeout_val)
            xml_text = response.text
            entry['data'] = xml_text
            entry['time'] = current_time
            return xml_text
        except requests.exceptions.Timeout:
            if attempt == 2:
                return None  # all retries exhausted
        except Exception:
            # Non-timeout errors abort without retrying.
            return None
    return None
def get_warehouse_stock_smart_with_price(user_message, previous_result=None):
    """Enhanced smart warehouse search with price and link info.

    Pipeline: filter out non-product messages -> serve from the search
    cache -> ask GPT to pick matching product indices from the warehouse
    feed -> enrich matches with Trek price/link data -> format text lines.

    Returns a list of display strings, or None when the message should be
    handled elsewhere (live-support requests, small talk, no results).
    NOTE(review): previous_result is never read in this body — confirm
    whether callers still rely on the parameter existing.
    """
    # Live support / customer-representative requests - DO NOT run a
    # product search for these phrases.
    live_support_phrases = [
        'müşteri bağla', 'canlı bağla', 'temsilci', 'yetkili', 'gerçek kişi',
        'insan ile', 'operatör', 'canlı destek', 'bağlayın', 'bağlar mısın',
        'görüşmek istiyorum', 'konuşmak istiyorum', 'yetkiliye bağla',
        'müşteri hizmetleri', 'çağrı merkezi', 'santral', 'bağla'
    ]
    clean_message = user_message.lower().strip()
    for phrase in live_support_phrases:
        if phrase in clean_message:
            return None  # Skip the product search; let GPT answer instead
    # Filter out common non-product words and responses
    non_product_words = [
        'süper', 'harika', 'güzel', 'teşekkürler', 'teşekkür', 'tamam', 'olur',
        'evet', 'hayır', 'merhaba', 'selam', 'iyi', 'kötü', 'fena', 'muhteşem',
        'mükemmel', 'berbat', 'idare eder', 'olabilir', 'değil', 'var', 'yok',
        'anladım', 'anlaşıldı', 'peki', 'tamamdır', 'ok', 'okay', 'aynen',
        'kesinlikle', 'elbette', 'tabii', 'tabiki', 'doğru', 'yanlış'
    ]
    # Check if message is just a simple response (exact match only)
    if clean_message in non_product_words:
        return None
    # Brand keywords that should ALWAYS trigger product search regardless of length
    brand_keywords = ['gobik', 'trek', 'bontrager', 'kask', 'shimano', 'sram', 'garmin', 'wahoo']
    # Check if message contains a brand keyword
    contains_brand = any(brand in clean_message for brand in brand_keywords)
    # Check if it's a single word that's likely not a product
    # BUT allow if it contains a known brand
    if not contains_brand and len(clean_message.split()) == 1 and len(clean_message) < 5:
        # Short single words are usually not product names
        return None
    # Check if this is a question rather than a product search
    # BUT skip this check if message contains a known brand
    question_indicators = [
        'musun', 'müsün', 'misin', 'mısın', 'miyim', 'mıyım',
        'musunuz', 'müsünüz', 'misiniz', 'mısınız',
        'neden', 'nasıl', 'ne zaman', 'kim', 'nerede', 'nereye',
        'ulaşamıyor', 'yapamıyor', 'gönderemiyor', 'edemiyor',
        '?'
    ]
    # If message contains question indicators, it's likely not a product search
    # EXCEPTION: If message contains a brand keyword, still search for products
    if not contains_brand:
        for indicator in question_indicators:
            if indicator in clean_message:
                return None

    # Normalize cache key for consistent caching (Turkish chars + lowercase)
    def normalize_for_cache(text):
        """Fold Turkish characters to ASCII and lowercase for a cache key."""
        tr_map = {'İ': 'i', 'I': 'i', 'ı': 'i', 'Ğ': 'g', 'ğ': 'g', 'Ü': 'u', 'ü': 'u',
                  'Ş': 's', 'ş': 's', 'Ö': 'o', 'ö': 'o', 'Ç': 'c', 'ç': 'c'}
        for tr, en in tr_map.items():
            text = text.replace(tr, en)
        return text.lower().strip()

    # Check search cache first
    cache_key = normalize_for_cache(user_message)
    current_time = time.time()
    if cache_key in cache['search_results']:
        cached = cache['search_results'][cache_key]
        if current_time - cached['time'] < CACHE_DURATION:
            cache_age = (current_time - cached['time']) / 60  # in minutes
            return cached['data']
        else:
            # Stale entry: fall through and recompute (entry is overwritten
            # when the new result is cached below).
            pass
    OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
    # Check if user is asking about a specific warehouse (keyword -> store)
    warehouse_keywords = {
        'caddebostan': 'Caddebostan',
        'ortaköy': 'Ortaköy',
        'ortakoy': 'Ortaköy',
        'alsancak': 'Alsancak',
        'izmir': 'Alsancak',
        'bahçeköy': 'Bahçeköy',
        'bahcekoy': 'Bahçeköy',
        'sarıyer': 'Bahçeköy',
        'sariyer': 'Bahçeköy'
    }
    user_lower = user_message.lower()
    asked_warehouse = None
    for keyword, warehouse in warehouse_keywords.items():
        if keyword in user_lower:
            asked_warehouse = warehouse
            break
    # Get cached XML data
    xml_text = get_cached_warehouse_xml()
    if not xml_text:
        return None
    # Extract product blocks from the raw XML with regex (no full parse)
    product_pattern = r'<Product>(.*?)</Product>'
    all_products = re.findall(product_pattern, xml_text, re.DOTALL)
    # Create simplified product list for GPT
    products_summary = []
    for i, product_block in enumerate(all_products):
        name_match = re.search(r'<ProductName><!\[CDATA\[(.*?)\]\]></ProductName>', product_block)
        variant_match = re.search(r'<ProductVariant><!\[CDATA\[(.*?)\]\]></ProductVariant>', product_block)
        if name_match:
            warehouses_with_stock = []
            warehouse_regex = r'<Warehouse>.*?<Name><!\[CDATA\[(.*?)\]\]></Name>.*?<Stock>(.*?)</Stock>.*?</Warehouse>'
            warehouses = re.findall(warehouse_regex, product_block, re.DOTALL)
            for wh_name, wh_stock in warehouses:
                try:
                    if int(wh_stock.strip()) > 0:
                        warehouses_with_stock.append(wh_name)
                except:
                    # Non-numeric stock values are ignored
                    pass
            product_info = {
                "index": i,
                "name": name_match.group(1),
                "variant": variant_match.group(1) if variant_match else "",
                "warehouses": warehouses_with_stock
            }
            products_summary.append(product_info)
    # Prepare warehouse filter if needed
    warehouse_filter = ""
    if asked_warehouse:
        warehouse_filter = f"\nIMPORTANT: User is asking specifically about {asked_warehouse} warehouse. Only return products available in that warehouse."
    # Debug logging
    # Check if the target product exists
    # Normalize Turkish characters for comparison

    def normalize_turkish(text):
        """Return (uppercased text, same with İ->I) for fuzzy comparison.

        NOTE(review): the `replacements` dict below is built but never
        applied — looks like leftover debug code; confirm before removing.
        """
        text = text.upper()
        replacements = {'I': 'İ', 'Ç': 'C', 'Ş': 'S', 'Ğ': 'G', 'Ü': 'U', 'Ö': 'O'}
        # Also try with İ -> I conversion
        text2 = text.replace('İ', 'I')
        return text, text2
    search_term = user_message.upper()
    search_norm1, search_norm2 = normalize_turkish(search_term)
    matching_products = []
    for p in products_summary:
        p_name = p['name'].upper()
        # Check both original and normalized versions
        if (search_term in p_name or
                search_norm1 in p_name or
                search_norm2 in p_name or
                search_term.replace('I', 'İ') in p_name):
            matching_products.append(p)
    # NOTE(review): matching_products is computed but never used below
    # (both branches pass) — appears to be a disabled debug hook.
    if matching_products:
        pass
    else:
        pass
    # GPT-5 prompt with enhanced instructions
    smart_prompt = f"""User is asking: "{user_message}"
FIRST CHECK: Is this actually a product search?
- If the message is a question about the system, service, or a general inquiry, return: -1
- If the message contains "musun", "misin", "neden", "nasıl", etc. it's likely NOT a product search
- Only proceed if this looks like a genuine product name or model
Find ALL products that match this query from the list below.
If user asks about specific size (S, M, L, XL, XXL, SMALL, MEDIUM, LARGE, X-LARGE), return only that size.
If user asks generally (without size), return ALL variants of the product.
{warehouse_filter}
CRITICAL TURKISH CHARACTER RULES:
- "MARLIN" and "MARLİN" are the SAME product (Turkish İ vs I)
- Treat these as equivalent: I/İ/ı, Ö/ö, Ü/ü, Ş/ş, Ğ/ğ, Ç/ç
- If user writes "Marlin", also match "MARLİN" in the list
IMPORTANT BRAND AND PRODUCT TYPE RULES:
- GOBIK: Spanish textile brand we import. When user asks about "gobik", return ALL products with "GOBIK" in the name.
- Product names contain type information: FORMA (jersey/cycling shirt), TAYT (tights), İÇLİK (base layer), YAĞMURLUK (raincoat), etc.
- Understand Turkish/English terms:
* "erkek forma" / "men's jersey" -> Find products with FORMA in name
* "tayt" / "tights" -> Find products with TAYT in name
* "içlik" / "base layer" -> Find products with İÇLİK in name
* "yağmurluk" / "raincoat" -> Find products with YAĞMURLUK in name
- Gender: UNISEX means for both men and women. If no gender specified, it's typically men's.
Products list (with warehouse availability):
{json.dumps(products_summary, ensure_ascii=False, indent=2)}
Return ONLY index numbers of ALL matching products as comma-separated list (e.g., "5,8,12,15").
If no products found, return ONLY: -1
DO NOT return empty string or any explanation, ONLY numbers or -1
Examples of correct responses:
- "2,5,8,12,15,20" (multiple products found)
- "45" (single product found)
- "-1" (no products found)"""
    # Check if we have API key before making the request
    if not OPENAI_API_KEY:
        # Try to find in Trek XML directly as fallback, but avoid tool products
        user_message_normalized = user_message.upper()
        tool_indicators = ['SUPER B', 'ANAHTAR', 'TAKIMI', 'PENSE', 'TOOL', 'ADAPTÖR', 'CONVERTER']
        should_skip_trek_lookup = any(indicator in user_message_normalized for indicator in tool_indicators)
        price, link = None, None
        if not should_skip_trek_lookup:
            price, link = get_product_price_and_link(user_message)
        if price and link:
            # Without the API key we cannot check warehouse stock, so
            # return price/link plus a "call the store" notice.
            return [
                f"🚲 **{user_message.title()}**",
                f"💰 Fiyat: {price}",
                f"🔗 Link: {link}",
                "",
                "⚠️ **Stok durumu kontrol edilemiyor**",
                "📞 Güncel stok için mağazalarımızı arayın:",
                "• Caddebostan: 0543 934 0438",
                "• Alsancak: 0543 936 2335"
            ]
        return None
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {OPENAI_API_KEY}"
    }
    # The GPT-5.5 model does not support temperature or max_tokens
    payload = {
        "model": "gpt-5.5",
        "messages": [
            {"role": "system", "content": "You are a product matcher. Find ALL matching products. Return only index numbers."},
            {"role": "user", "content": smart_prompt}
        ]
    }
    try:
        response = requests.post(
            "https://api.openai.com/v1/chat/completions",
            headers=headers,
            json=payload,
            timeout=10
        )
        if response.status_code == 200:
            result = response.json()
            indices_str = result['choices'][0]['message']['content'].strip()
            # Handle empty response - try Trek XML as fallback, but avoid tool products
            if not indices_str or indices_str == "-1":
                # Try to find in Trek XML directly, but skip tools
                user_message_normalized = user_message.upper()
                tool_indicators = ['SUPER B', 'ANAHTAR', 'TAKIMI', 'PENSE', 'TOOL', 'ADAPTÖR', 'CONVERTER']
                should_skip_trek_lookup = any(indicator in user_message_normalized for indicator in tool_indicators)
                price, link = None, None
                if not should_skip_trek_lookup:
                    price, link = get_product_price_and_link(user_message)
                if price and link:
                    # Found in Trek XML but not in warehouse stock!
                    return [
                        f"🚲 **{user_message.title()}**",
                        f"💰 Fiyat: {price}",
                        f"🔗 Link: {link}",
                        "",
                        "❌ **Stok Durumu: TÜKENDİ**",
                        "",
                        "📞 Stok güncellemesi veya ön sipariş için mağazalarımızı arayabilirsiniz:",
                        "• Caddebostan: 0543 934 0438",
                        "• Alsancak: 0543 936 2335"
                    ]
                return None
            try:
                # Filter out empty strings and parse indices
                indices = []
                for idx in indices_str.split(','):
                    idx = idx.strip()
                    if idx and idx.isdigit():
                        indices.append(int(idx))
                # Collect all matching products with price/link
                all_variants = []
                # NOTE(review): warehouse_stock is accumulated but never
                # read after the loop — confirm whether it can be dropped.
                warehouse_stock = {}
                for idx in indices:
                    if 0 <= idx < len(all_products):
                        product_block = all_products[idx]
                        # Get product details
                        name_match = re.search(r'<ProductName><!\[CDATA\[(.*?)\]\]></ProductName>', product_block)
                        variant_match = re.search(r'<ProductVariant><!\[CDATA\[(.*?)\]\]></ProductVariant>', product_block)
                        productcode_match = re.search(r'<ProductCode><!\[CDATA\[(.*?)\]\]></ProductCode>', product_block)
                        if name_match:
                            product_name = name_match.group(1)
                            variant = variant_match.group(1) if variant_match else ""
                            # Get price and link from Trek website - TRY SKU FIRST (NEW METHOD)
                            price, link = None, None
                            # Try SKU-based lookup first if ProductCode exists
                            product_code = productcode_match.group(1) if productcode_match else None
                            if product_code and product_code.strip():
                                price, link = get_product_price_and_link_by_sku(product_code.strip())
                            # Fallback to name-based if SKU didn't work, but be more careful about matching
                            if not price or not link:
                                # Only do name-based fallback if the product might reasonably be sold by Trek
                                # Avoid tools/accessories that clearly don't belong to Trek's bicycle catalog
                                product_name_normalized = product_name.upper()
                                # Skip name-based fallback for obvious tools/non-bike products
                                tool_indicators = ['SUPER B', 'ANAHTAR', 'TAKIMI', 'PENSE', 'TOOL', 'ADAPTÖR', 'CONVERTER']
                                should_skip_fallback = any(indicator in product_name_normalized for indicator in tool_indicators)
                                if not should_skip_fallback:
                                    price, link = get_product_price_and_link(product_name, variant)
                            variant_info = {
                                'name': product_name,
                                'variant': variant,
                                'price': price,
                                'link': link,
                                'warehouses': []
                            }
                            # Get warehouse stock
                            warehouse_regex = r'<Warehouse>.*?<Name><!\[CDATA\[(.*?)\]\]></Name>.*?<Stock>(.*?)</Stock>.*?</Warehouse>'
                            warehouses = re.findall(warehouse_regex, product_block, re.DOTALL)
                            for wh_name, wh_stock in warehouses:
                                try:
                                    stock = int(wh_stock.strip())
                                    if stock > 0:
                                        display_name = format_warehouse_name(wh_name)
                                        variant_info['warehouses'].append({
                                            'name': display_name,
                                            'stock': stock
                                        })
                                        if display_name not in warehouse_stock:
                                            warehouse_stock[display_name] = 0
                                        warehouse_stock[display_name] += stock
                                except:
                                    # Non-numeric stock values are ignored
                                    pass
                            # Only keep variants that are in stock somewhere
                            if variant_info['warehouses']:
                                all_variants.append(variant_info)
                # Format result
                result = []
                if asked_warehouse:
                    # Filter for specific warehouse
                    warehouse_variants = []
                    for variant in all_variants:
                        for wh in variant['warehouses']:
                            if asked_warehouse in wh['name']:
                                warehouse_variants.append(variant)
                                break
                    if warehouse_variants:
                        result.append(f"{format_warehouse_name(asked_warehouse)} mağazasında mevcut:")
                        for v in warehouse_variants:
                            variant_text = f" ({v['variant']})" if v['variant'] else ""
                            result.append(f"• {v['name']}{variant_text}")
                            if v['price']:
                                result.append(f" Fiyat: {v['price']}")
                            if v['link']:
                                result.append(f" Link: {v['link']}")
                    else:
                        result.append(f"{format_warehouse_name(asked_warehouse)} mağazasında bu ürün mevcut değil")
                else:
                    # Show all variants
                    if all_variants:
                        # Group by product name for cleaner display
                        product_groups = {}
                        for variant in all_variants:
                            if variant['name'] not in product_groups:
                                product_groups[variant['name']] = []
                            product_groups[variant['name']].append(variant)
                        result.append(f"Bulunan ürünler:")
                        for product_name, variants in product_groups.items():
                            result.append(f"\n{product_name}:")
                            # Show first variant's price and link (usually same for all variants)
                            if variants[0]['price']:
                                result.append(f"Fiyat: {variants[0]['price']}")
                            if variants[0]['link']:
                                result.append(f"Link: {variants[0]['link']}")
                            # Show variants and their availability
                            for v in variants:
                                if v['variant']:
                                    warehouses_str = ", ".join([w['name'].replace(' mağazası', '') for w in v['warehouses']])
                                    result.append(f"• {v['variant']}: {warehouses_str}")
                    else:
                        # No warehouse stock found - check if product exists in Trek
                        # But be careful not to match tools/accessories with bikes
                        user_message_normalized = user_message.upper()
                        tool_indicators = ['SUPER B', 'ANAHTAR', 'TAKIMI', 'PENSE', 'TOOL', 'ADAPTÖR', 'CONVERTER']
                        should_skip_trek_lookup = any(indicator in user_message_normalized for indicator in tool_indicators)
                        price, link = None, None
                        if not should_skip_trek_lookup:
                            price, link = get_product_price_and_link(user_message)
                        if price and link:
                            result.append(f"❌ **Stok Durumu: TÜM MAĞAZALARDA TÜKENDİ**")
                            result.append("")
                            result.append(f"💰 Web Fiyatı: {price}")
                            result.append(f"🔗 Ürün Detayları: {link}")
                            result.append("")
                            result.append("📞 Stok güncellemesi veya ön sipariş için:")
                            result.append("• Caddebostan: 0543 934 0438")
                            result.append("• Alsancak: 0543 936 2335")
                        else:
                            return None
                # Cache the result before returning
                cache['search_results'][cache_key] = {
                    'data': result,
                    'time': current_time
                }
                return result
            except (ValueError, IndexError) as e:
                return None
        else:
            return None
    except Exception as e:
        return None
def format_warehouse_name(wh_name):
    """Map a raw warehouse name to a human-friendly store label.

    Known stores are matched by substring (some appear in the feed with
    either ASCII or Turkish spelling); unknown names get the generic
    depot suffix stripped instead.
    """
    label_table = (
        (("CADDEBOSTAN",), "Caddebostan mağazası"),
        (("ORTAKÖY",), "Ortaköy mağazası"),
        (("ALSANCAK",), "İzmir Alsancak mağazası"),
        (("BAHCEKOY", "BAHÇEKÖY"), "Bahçeköy mağazası"),
    )
    for needles, label in label_table:
        if any(needle in wh_name for needle in needles):
            return label
    # Fallback: drop the generic "MAGAZA DEPO" suffix from the raw name.
    return wh_name.replace("MAGAZA DEPO", "").strip()