from flask import Flask, render_template, request, jsonify, session
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_chroma import Chroma
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.chains import create_history_aware_retriever, create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.messages import HumanMessage, AIMessage
from dotenv import load_dotenv
import os

# Load environment variables (e.g. the Google API key) from a .env file.
load_dotenv()

app = Flask(__name__, template_folder='templates')
app.secret_key = os.urandom(24)
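# NOTE: os.urandom(24) generates a fresh secret key on every start, so existing
# session cookies (and the chat history stored in them) are invalidated
# whenever the server restarts.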

# Components initialised at startup; they stay None if initialisation fails
# (the /get route refuses requests while rag_chain is None).
vectorstore = None
llm = None
retriever = None
rag_chain = None

try:
    # Embedding model for the Chroma index; it must match the model that was
    # used when the index was built.
    embedding_model = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")

    # Load the persisted vector store from the "data" directory and expose it
    # as a retriever that returns the 5 most similar chunks.
    vectorstore = Chroma(
        persist_directory="data",
        embedding_function=embedding_model
    )
    retriever = vectorstore.as_retriever(search_kwargs={"k": 5})
    print("Vector store loaded and retriever created.")
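
    # Note: the "data" directory is assumed to contain an index built beforehand
    # (e.g. by a separate ingestion script) with the same embedding model; if it
    # is empty, retrieval will simply return no documents.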

    # Gemini chat model, used both for rewriting the question and for answering.
    llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash", temperature=0.2)
    print("AI model (Gemini) initialised.")

    # Query-rewriting prompt (in Indonesian): given the conversation history and
    # the latest question, reformulate it as a standalone question.
    contextualize_q_prompt = ChatPromptTemplate.from_messages([
        ("system", "Mengingat riwayat percakapan dan pertanyaan terbaru, formulasikan ulang pertanyaan menjadi pertanyaan yang berdiri sendiri."),
        MessagesPlaceholder("chat_history"),
        ("human", "{input}"),
    ])
    history_aware_retriever = create_history_aware_retriever(llm, retriever, contextualize_q_prompt)

    # Answering prompt (in Indonesian): act as the AI assistant for BPVP Kota
    # Sorong, answer from the retrieved context in Indonesian, and say so when
    # the answer is not known.
    qa_prompt = ChatPromptTemplate.from_messages([
        ("system", "Anda adalah asisten AI untuk BPVP Kota Sorong. Gunakan potongan konteks berikut untuk menjawab pertanyaan. Jika tidak tahu jawabannya, katakan saja Anda tidak tahu. Jawab dalam bahasa Indonesia.\n\nKonteks:\n{context}"),
        MessagesPlaceholder("chat_history"),
        ("human", "{input}"),
    ])
    question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)
    rag_chain = create_retrieval_chain(history_aware_retriever, question_answer_chain)
    print("RAG chain created.")
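
    # The composed chain rewrites the question using the chat history, retrieves
    # matching chunks, stuffs them into {context}, and answers; invoke() returns
    # a dict whose "answer" key holds the reply.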

except Exception as e:
    # Startup failed (e.g. missing API key or vector store); the app still runs,
    # but /get will report that the server is not ready.
    print(f"CRITICAL ERROR DURING INITIALISATION: {e}")


@app.route('/')
def home():
    # Start every new visit with an empty conversation.
    session.pop("chat_history", None)
    return render_template('index.html')


@app.route('/get', methods=['GET'])
def get_response():
    if not rag_chain:
        return jsonify({"error": "Server belum siap. Periksa log untuk galat inisialisasi."}), 503

    user_message = request.args.get('msg')
    if not user_message:
        return jsonify({"error": "Pesan tidak boleh kosong."}), 400

    # Rebuild LangChain message objects from the JSON-serialisable history kept
    # in the session cookie.
    chat_history_from_session = session.get("chat_history", [])
    chat_history = [
        HumanMessage(content=msg["message"]) if msg.get("sender") == "user" else AIMessage(content=msg["message"])
        for msg in chat_history_from_session
    ]

    try:
        response = rag_chain.invoke({"input": user_message, "chat_history": chat_history})
        answer = response.get("answer", "Maaf, saya tidak dapat menemukan jawaban untuk itu.")

        # Append the new exchange to the session history, stored as plain dicts
        # so it stays serialisable in the session cookie.
        new_history = session.get("chat_history", [])
        new_history.append({"sender": "user", "message": user_message})
        new_history.append({"sender": "ai", "message": answer})
        session["chat_history"] = new_history

        return jsonify(answer)

    except Exception as e:
        print(f"ERROR while running the RAG chain: {e}")
        return jsonify({"error": "Maaf, terjadi masalah internal saat memproses permintaan Anda."}), 500
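

# Example call (assuming the default Flask dev server on http://127.0.0.1:5000):
#   curl "http://127.0.0.1:5000/get?msg=Halo"
# The endpoint responds with the answer serialised as a single JSON string.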

if __name__ == '__main__':
    app.run(debug=True)