Spaces:
Sleeping
Sleeping
Upload llmConnect.py
Browse files- llmConnect.py +69 -0
llmConnect.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Google Gen AI SDK: imports grouped at the top, before any executable
# statement (the original created the client between two import lines).
import google.genai as genai
from google.genai import types

# Shared client used by every function in this module.
# NOTE(review): presumably authenticates via the GOOGLE_API_KEY /
# GEMINI_API_KEY environment variable (SDK default) — confirm deployment env.
client = genai.Client()
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def readSystemInstructions():
    """Read the model system prompt from systemInstruction.txt.

    Returns:
        str: the full contents of the instruction file.

    Raises:
        FileNotFoundError: if systemInstruction.txt does not exist in the
            current working directory.
    """
    # Explicit encoding avoids platform-dependent default decoding
    # (e.g. cp1252 on Windows) of the prompt file.
    with open("systemInstruction.txt", "r", encoding="utf-8") as f:
        return f.read()
|
| 10 |
+
def testConnection():
    """Smoke-test connectivity to Google GenAI by listing available models.

    Prints the outcome instead of raising, so it can be run ad hoc.
    """
    print("Testing connection to Google GenAI")
    try:
        # The google-genai Client exposes listing via the models service,
        # not on the client object itself: client.list_models() raises
        # AttributeError, which the broad except below swallowed — making
        # this check report failure even with valid credentials.
        client.models.list()
        print("Connection successful")
    except Exception as e:
        # Deliberate best-effort diagnostic: report and continue.
        print("Connection failed")
        print(e)
|
| 18 |
+
|
| 19 |
+
# Dataset uploaded at import time so createCache() can reference it.
# NOTE(review): this performs a network upload as a module-level side
# effect on every import — consider moving inside createCache().
spamFile = client.files.upload(file="spamDataset.txt")
|
| 20 |
+
|
| 21 |
+
def createCache():
    """Create a cached-content entry holding the spam dataset and prompt.

    Returns:
        str: the server-assigned cache name (e.g. "cachedContents/...").
    """
    # Build the cache configuration separately for readability.
    cache_config = types.CreateCachedContentConfig(
        system_instruction=readSystemInstructions(),
        display_name='spamEmail',  # used to identify the cache
        contents=[spamFile],
        ttl="86400s",
    )
    created = client.caches.create(
        model="gemini-1.5-flash-001",
        config=cache_config,
    )
    return created.name
|
| 33 |
+
|
| 34 |
+
def chat(message: str, history: list,
         cache_name: str = "cachedContents/mvsoytvx3qdt"):
    """Send one user message to the cache-backed Gemini model.

    Args:
        message: the user's message text.
        history: prior chat turns in Gradio ChatInterface format, e.g.
            [{'role': 'user', 'content': ...}, ...]. Accepted to satisfy
            the Gradio callback signature but currently NOT forwarded —
            conversational context is limited to the cached content.
        cache_name: name of the cached content to attach. Defaults to the
            previously hard-coded cache id, so existing callers are
            unaffected; pass a fresh name after regenerating the cache
            with createCache().

    Returns:
        str: the model's reply text.
    """
    # Local renamed from `chat` to `session`: the original shadowed the
    # function's own name inside its body.
    session = client.chats.create(
        model="gemini-1.5-flash-001",
        config=types.GenerateContentConfig(
            cached_content=cache_name,
        ),
    )
    response = session.send_message(message)
    print(response.text)
    return str(response.text)
|
| 62 |
+
|
| 63 |
+
if __name__ == "__main__":
    # Ad-hoc maintenance entry point: dump every existing content cache.
    #createCache()
    for entry in client.caches.list():
        print(entry)
        # Uncomment to purge caches instead of listing them:
        #client.caches.delete(name=entry.name)
|
| 68 |
+
|
| 69 |
+
|