aspect2309 committed on
Commit
7262082
·
verified ·
1 Parent(s): eab0a74

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +88 -0
app.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ from langchain_community.embeddings import HuggingFaceEmbeddings
3
+ from langchain_community.vectorstores import Chroma
4
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
5
+ from langchain.document_loaders import TextLoader
6
+ from langchain.chains import RetrievalQA
7
+ from langchain.llms.base import LLM
8
+
9
+ from typing import List, Optional
10
+ from groq import Groq
11
+ import os
12
+
13
+
14
+
15
# --- Document ingestion: load the project notes, chunk them, and index into Chroma ---

# Path to the source document. The original value is a Colab-only location;
# allow overriding via PROJECT_TXT_PATH so the script runs elsewhere too.
project_path = os.environ.get("PROJECT_TXT_PATH", "/content/Project.txt")
loader = TextLoader(project_path)
documents = loader.load()

# 500-char chunks with 50-char overlap preserve context across chunk
# boundaries without inflating the number of embeddings.
text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)
docs = text_splitter.split_documents(documents)

# Small, fast sentence-transformer model for embeddings; Chroma persists the
# index to "rag_chroma_groq" on disk.
embedding = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
vectorstore = Chroma.from_documents(docs, embedding, persist_directory="rag_chroma_groq")
24
+
25
+
26
+
27
class GroqLLM(LLM):
    """Minimal LangChain LLM wrapper around the Groq chat-completions API.

    Sends the prompt as a single user message (behind a fixed system prompt)
    and returns the assistant's reply text.
    """

    model: str = "llama3-8b-8192"
    # SECURITY: never hard-code API keys in source (the original embedded a
    # live key). Read from the environment; pass api_key=... to override.
    api_key: str = os.environ.get("GROQ_API_KEY", "")
    temperature: float = 0.0

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* to Groq and return the completion text.

        *stop* sequences are now forwarded to the API (the original accepted
        the parameter but silently ignored it).
        """
        # NOTE(review): a new client per call is simple but wasteful; caching
        # one client would be a follow-up optimization.
        client = Groq(api_key=self.api_key)

        messages = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt},
        ]

        response = client.chat.completions.create(
            model=self.model,
            messages=messages,
            temperature=self.temperature,
            stop=stop,
        )

        return response.choices[0].message.content

    @property
    def _llm_type(self) -> str:
        """Identifier LangChain uses to tag this LLM implementation."""
        return "groq-llm"
51
+
52
+
53
+
54
# --- RAG chain wiring: retriever over the Chroma index + Groq LLM ---

retriever = vectorstore.as_retriever()

# SECURITY: the original passed a hard-coded (and therefore leaked) API key
# here; read it from the environment instead.
groq_llm = GroqLLM(api_key=os.environ.get("GROQ_API_KEY", ""))

# RetrievalQA stuffs retrieved chunks into the prompt; source documents are
# returned alongside the answer for traceability.
qa_chain = RetrievalQA.from_chain_type(
    llm=groq_llm,
    retriever=retriever,
    return_source_documents=True,
)
62
+
63
+
64
+
65
# --- Smoke test: run one query through the chain before launching the UI ---
query = "Explain the whole project in points and sections"
# .invoke() is the current LangChain entry point; calling the chain directly
# (qa_chain({...})) is deprecated.
result = qa_chain.invoke({"query": query})
print("Answer:", result["result"])
68
+
69
import gradio as gr


def answer_query(query):
    """Run *query* through the RAG chain and return just the answer text."""
    # .invoke() replaces the deprecated direct-call form qa_chain({...}).
    result = qa_chain.invoke({"query": query})
    return result["result"]


# Single-textbox UI in front of the QA chain.
interface = gr.Interface(
    fn=answer_query,
    inputs=gr.Textbox(lines=2, placeholder="Ask me anything about the project..."),
    outputs="text",
    title="🧠 DermaBot RAG Assistant",
    description="Ask questions based on my projects",
)

# Launch the interface (blocks until the server is stopped).
interface.launch()