# NOTE: the lines below were non-Python residue from a Hugging Face Spaces
# web capture (space status, commit hashes, rendered line numbers); they are
# kept as a comment so the file parses as Python.
# Spaces: Sleeping | File size: 3,767 Bytes
import os

import gradio as gr
from PyPDF2 import PdfReader

from langchain.agents import AgentType, Tool, initialize_agent
from langchain.memory import ConversationSummaryMemory
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_core.exceptions import OutputParserException
from langchain_openai import ChatOpenAI
apiKey = os.getenv("OPENAI_API_KEY")
# Load PDF
def read_pdf(file_paths):
combined_text = ""
for file_path in file_paths:
with open(file_path, "rb") as file:
reader = PdfReader(file)
text = ""
for page in reader.pages:
text += page.extract_text()
combined_text += text + "\n\n"
return combined_text
pdf_file_path = ["property_law.pdf","ipc.pdf","constitution_of_india.pdf","ipc_2.pdf","cn_2.pdf","pl_2.pdf"]
document_text = read_pdf(pdf_file_path)
text_splitter = RecursiveCharacterTextSplitter(
chunk_size=1000,
chunk_overlap=100
)
chunks = text_splitter.split_text(document_text)
embeddings = OpenAIEmbeddings(openai_api_key=apiKey)
vector_db = FAISS.from_texts(chunks, embeddings)
exceptionMsg = "Sorry, I couldn't understand your question. Please ask a specific question regarding IPC, Transfer of Property and Constitution of India."
def retrieve_from_db(query):
results = vector_db.similarity_search(query, k=1)
return results[0].page_content
llm = ChatOpenAI(openai_api_key=apiKey)
tools = [
Tool(
name="Legal-Library",
func=retrieve_from_db,
description=(
"Searches a legal document database including the Indian Penal Code, "
"Constitution of India, and Transfer of Property Act to retrieve accurate, "
"contextual, and relevant legal information. Use this tool for queries "
"related to specific laws, sections, or provisions in these documents."
)
)
]
memory = ConversationSummaryMemory(llm=llm)
agent = initialize_agent(
tools=tools,
agent_type="zero-shot-react-description",
llm=llm,
memory=memory,
handle_parsing_errors=True
)
def chatbot(input_text, chat_history):
try:
response = agent.run(input_text)
if response == "N/A":
response = exceptionMsg
memory.save_context({"user": input_text}, {"assistant": response})
chat_history.append([input_text, response])
return chat_history, ""
except OutputParserException as e:
error_message = exceptionMsg
chat_history.append([error_message, input_text])
print("Error:", str(e))
return chat_history, ""
def clear_chat():
return [],""
def gradio_interface():
with gr.Blocks() as demo:
gr.Markdown("""
<div style="text-align: center;">
<h1>Legal Query Chatbot</h1>
</div>
""")
with gr.Column():
chatbot_ui = gr.Chatbot()
user_input = gr.Textbox(placeholder="Ask your legal questions here, such as IPC sections, property laws, constitution articles.")
submit_button = gr.Button("Submit", elem_classes="gr-button", variant="primary")
clear_chat_button = gr.Button("Clear Chat", variant="secondary")
submit_button.click(fn=chatbot, inputs=[user_input, chatbot_ui], outputs=[chatbot_ui, user_input])
user_input.submit(fn=chatbot, inputs=[user_input, chatbot_ui], outputs=[chatbot_ui, user_input])
clear_chat_button.click(fn=clear_chat, inputs=None, outputs=[chatbot_ui, user_input])
return demo
app = gradio_interface()
if __name__ == "__main__":
app.launch() |