File size: 2,298 Bytes
19d2432
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b9fb9dc
19d2432
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
# Google GenAI SDK: `genai` for the client, `types` for request config objects.
import google.genai as genai
from google.genai import types

# Shared API client; picks up credentials from the environment (e.g. GOOGLE_API_KEY).
client = genai.Client()


def readSystemInstructions():
    """Return the model system instructions from systemInstruction.txt.

    Returns:
        The full file contents as a single string.

    Raises:
        FileNotFoundError: if systemInstruction.txt is absent from the
            current working directory.
    """
    # Explicit encoding avoids platform-dependent default codecs.
    with open("systemInstruction.txt", "r", encoding="utf-8") as f:
        return f.read()
def testConnection():
    """Smoke-test connectivity to the Google GenAI API.

    Prints the outcome instead of raising; any failure (auth, network,
    API error) is caught and reported.
    """
    print("Testing connection to Google GenAI")
    try:
        # The google-genai SDK exposes model listing as client.models.list()
        # (consistent with client.caches / client.chats / client.files used
        # elsewhere in this file); client.list_models() does not exist and
        # would raise AttributeError, making this test always "fail".
        client.models.list()
        print("Connection successful")
    except Exception as e:
        print("Connection failed")
        print(e)

#spamFile = client.files.upload(file="spamDataset.txt")

def createCache(contents=None, ttl="86400s"):
    """Create a cached-content entry seeded with the system instructions.

    Args:
        contents: optional list of content parts (e.g. uploaded file handles)
            to store in the cache. Defaults to uploading spamDataset.txt.
            NOTE(review): the original code referenced a module-level
            `spamFile` whose upload line was commented out, so calling this
            function raised NameError; the lazy upload below fixes that while
            keeping the zero-argument call working.
        ttl: cache lifetime as a duration string; default 86400s (24 hours).

    Returns:
        The server-assigned cache name (e.g. "cachedContents/...").
    """
    if contents is None:
        contents = [client.files.upload(file="spamDataset.txt")]
    cache = client.caches.create(
        model="gemini-1.5-flash-001",
        config=types.CreateCachedContentConfig(
            system_instruction=readSystemInstructions(),
            display_name='spamEmail',  # used to identify the cache
            contents=contents,
            ttl=ttl,
        ),
    )
    return cache.name

def chat(message: str, history: list):
    """Chat handler (Gradio-style signature) backed by a cached Gemini context.

    Args:
        message: the user's latest message.
        history: prior turns as a list of {'role': ..., 'content': ...}
            dicts. Currently unused — the cached content supplies the
            conversation context — but kept for interface compatibility
            with the chat UI that calls this function.

    Returns:
        The model's reply text as a string.
    """
    # NOTE(review): the cache name is hard-coded; it must refer to a live
    # cache (see createCache), otherwise the request fails. Consider wiring
    # createCache()'s return value through instead.
    session = client.chats.create(
        model="gemini-1.5-flash-001",
        config=types.GenerateContentConfig(
            cached_content="cachedContents/mvsoytvx3qdt",
        ),
    )
    response = session.send_message(message)
    print(response.text)
    return str(response.text)

if __name__ == "__main__":
    #createCache()
    # Enumerate every cached-content entry on the account.
    for existing in client.caches.list():
        print(existing)
        # Uncomment to purge caches:
        #client.caches.delete(name=existing.name)