# securityChatbot / llmConnect.py
# Provenance: Hugging Face Space file page (commit b9fb9dc, "Update llmConnect.py",
# author Sarathrsk03). Header converted to comments so the module parses.
import google.genai as genai
from google.genai import types

# Module-level GenAI client shared by every function below.
# NOTE(review): constructed at import time — presumably picks up credentials
# from the environment (e.g. GOOGLE_API_KEY); any auth problem will surface
# as an import-time error. Confirm deployment sets the key before import.
client = genai.Client()
def readSystemInstructions(path="systemInstruction.txt"):
    """Return the chatbot's system prompt read from a text file.

    Args:
        path: File containing the system instructions. Defaults to the
              previously hard-coded ``systemInstruction.txt`` so existing
              callers are unaffected.

    Returns:
        The full file contents as a string.

    Raises:
        OSError: If the file cannot be opened.
    """
    # Explicit encoding avoids platform-dependent default-codec surprises.
    with open(path, "r", encoding="utf-8") as f:
        return f.read()
def testConnection():
    """Smoke-test connectivity to the Google GenAI API.

    Prints the outcome instead of raising, so it is safe to call from a
    startup script without crashing the app.
    """
    print("Testing connection to Google GenAI")
    try:
        # BUG FIX: the google-genai client exposes model listing as
        # client.models.list(); the original client.list_models() raised
        # AttributeError, so this check always reported "Connection failed".
        client.models.list()
        print("Connection successful")
    except Exception as e:  # broad on purpose: report any failure, don't crash
        print("Connection failed")
        print(e)
def createCache(dataset_path="spamDataset.txt"):
    """Create a Gemini cached-content entry for the spam dataset.

    Args:
        dataset_path: Path of the dataset file to upload as cached context.
                      Defaults to the original hard-coded ``spamDataset.txt``.

    Returns:
        The resource name of the created cache (``cachedContents/...``),
        suitable for passing as ``cached_content`` in generation configs.
    """
    # BUG FIX: the original referenced a module-level `spamFile` whose
    # upload line was commented out, so calling this raised NameError.
    # Upload the dataset here instead.
    spam_file = client.files.upload(file=dataset_path)
    cache = client.caches.create(
        model="gemini-1.5-flash-001",
        config=types.CreateCachedContentConfig(
            system_instruction=readSystemInstructions(),
            display_name='spamEmail',  # used to identify the cache
            contents=[spam_file],
            ttl="86400s",  # keep the cache alive for 24 hours
        )
    )
    return cache.name
def chat(message: str, history: list,
         cache_name: str = "cachedContents/mvsoytvx3qdt"):
    """Send a user message to Gemini using a pre-built content cache.

    Args:
        message: The user's latest message.
        history: Gradio-style chat history — a list of dicts with ``role``
                 and ``content`` keys. Currently unused: each call starts a
                 fresh chat session and relies on the cached content for
                 context. TODO(review): feed `history` into the session so
                 multi-turn context is preserved.
        cache_name: Resource name of the cached content to attach.
                    Defaults to the previously hard-coded cache ID so
                    existing callers are unaffected.

    Returns:
        The model's reply text as a string.
    """
    # Renamed from `chat` to avoid shadowing this function's own name;
    # dead commented-out history-formatting code removed.
    session = client.chats.create(
        model="gemini-1.5-flash-001",
        config=types.GenerateContentConfig(
            cached_content=cache_name,
        ),
    )
    response = session.send_message(message)
    print(response.text)
    return str(response.text)
if __name__ == "__main__":
    # Manual maintenance helper: enumerate the existing content caches.
    # createCache()
    for existing_cache in client.caches.list():
        print(existing_cache)
        # client.caches.delete(name=existing_cache.name)