# Hemdem / app.py
# (Hugging Face Space — "Update app.py", commit 135e611, verified)
import gradio as gr
import os
from huggingface_hub import InferenceClient
from langchain_community.document_loaders import PyMuPDFLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
# 1. Settings and client
# HF API token is read from the environment (set as a Space secret).
hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
# Inference client pinned to the Gemma 2 9B instruction-tuned model.
client = InferenceClient("google/gemma-2-9b-it", token=hf_token)
# 2. Build the PDF memory (vector store)
def initialize_hemdem():
    """Build a FAISS vector store from 'kulliyat.pdf'.

    Loads the PDF, splits it into overlapping chunks, embeds them with a
    MiniLM sentence-transformer and indexes them in FAISS.

    Returns:
        The FAISS vector store, or None when the PDF is missing or any
        step of the pipeline fails (best-effort startup).
    """
    # Guard clause: without the source PDF there is nothing to index.
    if not os.path.exists("kulliyat.pdf"):
        return None
    try:
        pages = PyMuPDFLoader("kulliyat.pdf").load()
        chunks = RecursiveCharacterTextSplitter(
            chunk_size=1000,
            chunk_overlap=200,
        ).split_documents(pages)
        embedder = HuggingFaceEmbeddings(
            model_name="sentence-transformers/all-MiniLM-L6-v2"
        )
        return FAISS.from_documents(chunks, embedder)
    except Exception as e:
        # Log and degrade to None so the app can still start and report
        # the problem from the chat handler.
        print(f"Hata: {e}")
        return None
# Build the vector store once at startup; None signals a missing or failed PDF.
vector_db = initialize_hemdem()
# 3. Chat function
def chat(message, history):
    """Answer *message* grounded in chunks retrieved from the PDF store.

    Args:
        message: The user's latest chat message (str).
        history: Prior turns supplied by gr.ChatInterface; unused here —
            grounding comes from the vector store, not the chat history.

    Returns:
        The model's reply as a string, or a Turkish error message when the
        vector store is unavailable or the inference call fails.
    """
    if vector_db is None:
        return "Sistem hatası: 'kulliyat.pdf' bulunamadı."

    # Retrieve the 3 most relevant chunks and join them as grounding context.
    related_docs = vector_db.similarity_search(message, k=3)
    context = "\n".join(doc.page_content for doc in related_docs)

    # Gemma 2 message format (compatible with the conversational task).
    messages = [
        {"role": "system", "content": f"Sen Hemdem-i Gemini-yi Emre'sin. Emre'nin külliyatına dayanarak cevap ver. Dökümanlar: {context}"},
        {"role": "user", "content": message},
    ]
    try:
        response = ""
        # chat_completion sidesteps the 'conversational' task error.
        # FIX: use a distinct loop variable — the original shadowed the
        # `message` parameter with each streamed chunk.
        for chunk in client.chat_completion(
            messages,
            max_tokens=1024,
            stream=True,
        ):
            token = chunk.choices[0].delta.content
            # FIX: the final streamed delta may carry content=None;
            # guard it so `response += token` cannot raise TypeError.
            if token:
                response += token
        return response
    except Exception as e:
        return f"Bağlantı Hatası: {str(e)}"
# 4. Interface
# Gradio chat UI wired to the RAG handler above.
demo = gr.ChatInterface(
    fn=chat,
    title="💜 Hemdem-i Şir",
    description="Gemma 2 destekli Emre Külliyat Rehberi",
)
if __name__ == "__main__":
    demo.launch()