Spaces:
Running
Running
| import gradio as gr | |
| import torch | |
| from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer, BitsAndBytesConfig | |
| from threading import Thread | |
| import os | |
| from selenium import webdriver | |
| from selenium.webdriver.common.by import By | |
| from selenium.webdriver.common.keys import Keys | |
| from selenium.webdriver.chrome.options import Options | |
| import time | |
# --- MODEL SETTINGS ---
# NOTE(review): the original comment said "or try deepseek-ai/DeepSeek-V3",
# which is the same ID already in use — confirm which alternative was meant.
MODEL_ID = "deepseek-ai/DeepSeek-V3"

# 4-bit quantization (NF4, double quantization, fp16 compute) to reduce GPU
# memory needed to host the model.
quantization_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
)

print("🔄 Tokenizer yükleniyor...")
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)
# Some checkpoints ship without a pad token; fall back to EOS so padding in
# generation does not fail.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token

print("🔄 Model yükleniyor (bu biraz sürebilir)...")
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    quantization_config=quantization_config,
    device_map="auto",  # let accelerate place layers across available devices
    trust_remote_code=True
)
print("✅ Model başarıyla yüklendi!")
def search_google_selenium(query):
    """Run a headless-Chrome Google search and return the top results as text.

    Args:
        query: Search phrase typed into Google's search box.

    Returns:
        A string with up to three scraped results (title/link/snippet blocks),
        "Sonuç bulunamadı." when no result could be scraped, or an
        "Arama hatası: ..." message when Selenium itself fails.
    """
    try:
        chrome_options = Options()
        chrome_options.add_argument("--headless")
        chrome_options.add_argument("--no-sandbox")
        chrome_options.add_argument("--disable-dev-shm-usage")
        # Reduce the chance of Google serving a bot-detection page.
        chrome_options.add_argument("--disable-blink-features=AutomationControlled")
        chrome_options.add_argument("user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36")

        driver = webdriver.Chrome(options=chrome_options)
        try:
            driver.get("https://www.google.com")
            time.sleep(1)  # crude wait for the page to settle

            search_box = driver.find_element(By.NAME, "q")
            search_box.send_keys(query)
            search_box.send_keys(Keys.RETURN)
            time.sleep(2)  # crude wait for the results page to render

            results = []
            result_blocks = driver.find_elements(By.CSS_SELECTOR, "div.g")
            for block in result_blocks[:3]:
                try:
                    title = block.find_element(By.CSS_SELECTOR, "h3").text
                    link = block.find_element(By.CSS_SELECTOR, "a").get_attribute("href")
                    desc = block.find_element(By.CSS_SELECTOR, "div.VwiC3b").text
                    results.append(f"Başlık: {title}\nLink: {link}\nÖzet: {desc}\n")
                except Exception:
                    # A result card missing one of its sub-elements is skipped,
                    # not treated as fatal. (Was a bare `except:` before.)
                    continue
        finally:
            # BUG FIX: the original only called quit() on the success path,
            # leaking a Chrome process whenever scraping raised mid-way.
            driver.quit()

        return "\n".join(results) if results else "Sonuç bulunamadı."
    except Exception as e:
        return f"Arama hatası: {str(e)}"
def respond(message, history, internet_search):
    """Stream a chat completion for *message*, optionally grounded in web search.

    Args:
        message: The user's latest message.
        history: List of (user_msg, assistant_msg) tuples supplied by Gradio.
        internet_search: When True, prepend Google results (via Selenium) to
            the prompt so the model can use them in its answer.

    Yields:
        The progressively growing assistant response (Gradio streaming).
    """
    system_prompt = "NoiseAI/Noise adlı, 32B parametreli, MagnuX tarafından eğitilmiş bir yapay zekasın. Türkçe konuş ve en iyi kod pratiklerini uygula."

    # NOTE(review): the prompt is hand-built in ChatML format (<|im_start|>…);
    # confirm this matches MODEL_ID's expected chat template —
    # tokenizer.apply_chat_template would be the robust alternative.
    full_prompt = f"<|im_start|>system\n{system_prompt}<|im_end|>\n"
    for user_msg, assistant_msg in history:
        if user_msg:
            full_prompt += f"<|im_start|>user\n{user_msg}<|im_end|>\n"
        if assistant_msg:
            full_prompt += f"<|im_start|>assistant\n{assistant_msg}<|im_end|>\n"

    if internet_search:
        # Inject live search results ahead of the user's question.
        search_results = search_google_selenium(message)
        message = f"Kullanıcı Sorusu: {message}\n\nİnternet Arama Sonuçları:\n{search_results}\n\nYukarıdaki bilgileri kullanarak cevap ver."

    full_prompt += f"<|im_start|>user\n{message}<|im_end|>\n<|im_start|>assistant\n"

    inputs = tokenizer([full_prompt], return_tensors="pt").to(model.device)
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generation_kwargs = dict(
        inputs,
        streamer=streamer,
        max_new_tokens=2048,
        temperature=0.6,
        top_p=0.9,
        do_sample=True,
        # FIX: pass the pad token explicitly — the fallback configured at load
        # time is otherwise not propagated to generate(), producing warnings.
        pad_token_id=tokenizer.pad_token_id,
    )

    # Generation runs on a worker thread so we can yield partial text as the
    # streamer produces it.
    thread = Thread(target=model.generate, kwargs=generation_kwargs)
    thread.start()
    partial_text = ""
    for new_text in streamer:
        partial_text += new_text
        yield partial_text
    # BUG FIX: join the worker so the generation thread is not left dangling
    # after the streamer is exhausted.
    thread.join()
# --- Gradio UI ---
# ChatInterface wires `respond` into a streaming chat widget; the checkbox is
# forwarded to `respond` as its `internet_search` argument.
demo = gr.ChatInterface(
    respond,
    title="NoiseAI (Local Engine) - 32B",
    description="Bu Space, modeli kendi donanımında (4-bit Quantized) çalıştırır.",
    additional_inputs=[
        gr.Checkbox(label="🌐 İnternette Ara", value=False)
    ]
)

# Bind to all interfaces on port 7860 (the Hugging Face Spaces convention).
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)