dhanvanth183 committed on
Commit
1d8c363
·
verified ·
1 Parent(s): bf2bc7a

Upload Groq_llms.py

Browse files
Files changed (1) hide show
  1. Groq_llms.py +50 -0
Groq_llms.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ #from langchain_community.chat_models import ChatOpenAI
3
+ from langchain_groq import ChatGroq
4
+ from dotenv import load_dotenv
5
+
6
+ load_dotenv()
7
+
8
class LLMHandler:
    """Thin wrapper around a Groq-hosted chat model for generating short,
    personalized invitation texts."""

    def __init__(self, model_name: str = "llama-3.3-70b-versatile"):
        """
        Initializes the LLMHandler with the specified Groq model.

        :param model_name: Name of the Groq-hosted model to use.
        :raises ValueError: If the GROQ_API_KEY environment variable is not set.
        """
        self.groq_api_key = os.getenv("GROQ_API_KEY")
        if not self.groq_api_key:
            raise ValueError("GROQ_API_KEY environment variable not set.")

        # Initialize Groq LLM client
        self.llm = ChatGroq(groq_api_key=self.groq_api_key, model_name=model_name)

    def generate_response(self, user_prompt: str, data: dict) -> str:
        """
        Generate a concise response using the LLM based on user prompt and data.

        :param user_prompt: Prompt provided by the user.
        :param data: Dictionary containing the instance information; must
                     provide the keys 'Name', 'Job Title', 'Organisation',
                     'Area of Interest', and 'Category'.
        :return: Generated response text, stripped of surrounding whitespace.
        :raises KeyError: If a required key is missing from ``data``.
        """
        # BUG FIX: the original adjacent f-string literals were implicitly
        # concatenated without any separator, producing run-together prompt
        # text such as "Hello NameWrite a personalized...",
        # "instructions.STRICTLY Give..." and "response.NO PREAMBLE".
        # Each instruction now ends with an explicit newline so the model
        # receives well-separated directives.
        prompt = (
            f"You are a professional AI model tasked with writing personalized invite texts "
            f"that are concise (less than 40 words), brochure-suitable, and tailored as per the category in the given sample."
            f"\n\n"
            f"Consider the user prompt: {user_prompt}\n\n"
            f"Details of the individual:\n"
            f"- Name: {data['Name']}\n"
            f"- Job Title: {data['Job Title']}\n"
            f"- Organisation: {data['Organisation']}\n"
            f"- Area of Interest: {data['Area of Interest']}\n"
            f"- Category: {data['Category']}\n\n"
            f"The response can start with Hello Name.\n"
            f"Write a personalized invitation text for this individual, ensuring the tone and purpose align with the user's instructions.\n"
            f"STRICTLY Give only one response for the Category the sample belongs to.\n"
            f"Do Not mention Category in the response.\n"
            f"NO PREAMBLE"
        )

        # Query the LLM and return the response
        response = self.llm.invoke(prompt)
        return response.content.strip()