Spaces:
Sleeping
Sleeping
File size: 2,386 Bytes
fc670b4 b91b29d 7107ac8 4949586 ee8c5b2 4d53c99 4949586 4d53c99 4949586 4d53c99 4949586 4d53c99 f257a89 4949586 f257a89 4949586 f257a89 fc670b4 4d53c99 4949586 fc670b4 ee8c5b2 4d53c99 fc670b4 ee8c5b2 fc670b4 4d53c99 ee8c5b2 4d53c99 ee8c5b2 fc670b4 4d53c99 b91b29d 4d53c99 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 |
import os
import gradio as gr
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEmbeddings
from groq import Groq
# Path to the FAISS index directory.
faiss_path = "faiss_index"

# Fail fast if the FAISS index file is missing (e.g. it was never uploaded
# to the Space) rather than erroring later inside FAISS.load_local.
if not os.path.exists(f"{faiss_path}/index.faiss"):
    # NOTE(review): "β οΈ" looks like a mojibake'd warning emoji — confirm source encoding.
    raise FileNotFoundError(f"β οΈ FAISS index tidak ditemukan di {faiss_path}. Pastikan Anda telah mengunggahnya!")

# Load the FAISS index with a sentence-transformers embedding model.
# allow_dangerous_deserialization is required because FAISS indexes are
# pickled on disk; acceptable here only because the index is trusted
# (created/uploaded by the Space owner), not user-supplied.
vector_store = FAISS.load_local(
    faiss_path,
    HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2"),
    allow_dangerous_deserialization=True,
)

# Load the Groq API key from the environment (Hugging Face Spaces secrets).
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
if not GROQ_API_KEY:
    raise ValueError("β οΈ API Key Groq tidak ditemukan! Setel variabel lingkungan 'GROQ_API_KEY'.")

# Initialize the Groq API client used by retrieve_and_generate below.
client = Groq(api_key=GROQ_API_KEY)
def retrieve_and_generate(query, history=None):
    """Retrieve relevant documents and generate an LLM answer.

    Args:
        query: The user's question (string).
        history: Optional chat history as a list of (question, answer)
            tuples. If given, it is appended to in place; if omitted, a
            fresh list is created. (Previously a mutable default ``[]``
            was used, which silently shared one list across all calls.)

    Returns:
        The updated chat history list, suitable for a Gradio Chatbot.
    """
    if history is None:
        history = []

    # Retrieve the 3 most relevant documents from the FAISS index and
    # concatenate their contents into a single context string.
    docs = vector_store.similarity_search(query, k=3)
    context = "\n\n".join(doc.page_content for doc in docs)

    # Generate a response with the Groq-hosted model, grounding the
    # answer in the retrieved context.
    response = client.chat.completions.create(
        model="mixtral-8x7b-32768",
        messages=[
            {"role": "system", "content": "Anda adalah asisten AI yang menjawab pertanyaan tentang RoboHome berdasarkan dokumen ini."},
            {"role": "user", "content": f"{context}\n\nPertanyaan: {query}"},
        ],
        temperature=0.7,
        max_tokens=200,
    )

    bot_response = response.choices[0].message.content
    history.append((query, bot_response))  # persist this turn in the chat history
    return history
# Gradio UI: a chatbot display, a textbox for questions, and a send button.
with gr.Blocks() as demo:
    gr.Markdown("## π€ RoboHome RAG Chatbot")
    gr.Markdown("Chatbot ini menjawab pertanyaan berdasarkan dokumentasi RoboHome.")
    chatbot = gr.Chatbot(label="π¬ Jawaban RoboHome")
    input_text = gr.Textbox(label="βοΈ Ajukan pertanyaan tentang RoboHome", placeholder="Ketik pertanyaan di sini...")
    send_button = gr.Button("π Kirim")

    def process_input(user_input, history):
        # Delegate to the RAG pipeline; returns the updated history,
        # which Gradio renders back into the Chatbot component.
        return retrieve_and_generate(user_input, history)

    # Wire the button: current textbox value + chat state in, new history out.
    send_button.click(process_input, inputs=[input_text, chatbot], outputs=chatbot)

demo.launch(share=True)
|