File size: 7,821 Bytes
39951bf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3f34ec3
39951bf
06cb4d9
39951bf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3f34ec3
 
39951bf
 
 
 
 
 
3f34ec3
39951bf
 
 
 
 
ed38779
 
 
 
 
 
 
39951bf
 
 
 
 
 
 
 
ed38779
 
 
 
 
 
 
 
 
 
06cb4d9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ed38779
39951bf
 
3f34ec3
06cb4d9
ed38779
 
 
 
 
06cb4d9
 
 
 
3f34ec3
 
 
 
 
 
39951bf
 
 
 
06cb4d9
39951bf
a973b13
39951bf
 
 
 
06cb4d9
39951bf
 
 
 
ed38779
 
 
06cb4d9
3f34ec3
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
#coding: utf-8

import re
from os import getenv
from typing import Any
from typing import Dict
from typing import IO
from typing import List
from typing import Optional
from typing import Tuple
from typing import Union
from var_app import GlobalSystemPrompts
import streamlit as st
from openai import OpenAI

from dotenv import load_dotenv
# Load environment variables (e.g. OPENAI_API_KEY) from the .env file.

class DemorrhaAssistant(object):
    """Wrapper around the OpenAI Assistants API for the "Demorrha" assistant.

    Manages the assistant itself, the vector stores used by its ``file_search``
    tool, and streaming thread runs that yield the response incrementally.
    """

    def __init__(self):
        # Load environment variables (OPENAI_API_KEY) from a .env file.
        load_dotenv()
        self.client = self.initialize_client()
        # Set lazily by load_assistant() / load_vector_store().
        self.assistant = None
        self.vector_store = None
        self.list_vector_store_ids = []

    def initialize_client(self):
        """Build an OpenAI client from the OPENAI_API_KEY environment variable."""
        api_key = getenv("OPENAI_API_KEY")
        return OpenAI(api_key=api_key)

    def search_assistant(self, assistant_name="Demorrha"):
        """Return the id of the first assistant named *assistant_name*, or None.

        Pages through the account's assistants 20 at a time (descending order).
        """
        last_id = None
        while True:
            page = self.client.beta.assistants.list(
                order="desc",
                limit=20,  # fixed: the SDK expects an int, not the string "20"
                after=last_id
            )
            if not page.data:
                return None
            last_id = page.data[-1].id
            for assistant in page.data:
                if assistant.name == assistant_name:
                    return assistant.id

    def search_vector_store(self, vector_store_name="Demorrha_Style"):
        """Return the id of the first vector store named *vector_store_name*, or None."""
        last_id = None
        while True:
            page = self.client.beta.vector_stores.list(
                order="desc",
                limit=20,
                after=last_id
            )
            if not page.data:
                return None
            last_id = page.data[-1].id
            for vector_store in page.data:
                if vector_store.name == vector_store_name:
                    return vector_store.id

    def load_vector_store(self, vector_store_name="Demorrha_Style"):
        """Retrieve the named vector store, creating it if it does not exist.

        Stores the result on ``self.vector_store``; returns ``self`` (fluent API).
        """
        vector_store_id = self.search_vector_store(vector_store_name)
        if vector_store_id is None:
            self.vector_store = self.client.beta.vector_stores.create(name=vector_store_name)
        else:
            self.vector_store = self.client.beta.vector_stores.retrieve(vector_store_id)
        return self

    def get_vector_store(self):
        """Return the vector store loaded by load_vector_store(), or None."""
        return self.vector_store

    def upload_file(self,
                    file_path,
                    purpose="assistants"):
        """Upload a local file to OpenAI and return the created file object.

        Fixed: the original leaked the file handle by never closing it.
        """
        with open(file_path, "rb") as fh:
            return self.client.files.create(
                file=fh,
                purpose=purpose
            )

    def list_files_in_vector_store(self, vector_store_id):
        """Return all file objects attached to *vector_store_id*.

        BUG FIX: the original reused ``files_list`` both as the accumulator and
        as the API page, so each page overwrote previously collected files and
        ``append`` mutated the API response object instead of a list.
        """
        collected = []
        last_id = None
        while True:
            page = self.client.beta.vector_stores.files.list(
                vector_store_id=vector_store_id,
                limit=20,
                after=last_id
            )
            if not page.data:
                break
            last_id = page.data[-1].id
            collected.extend(page.data)
        return collected

    # NOTE(review): "vectore" is a typo, but the name is part of the public
    # interface — kept for backward compatibility with existing callers.
    def attach_file_to_vectore_store(self,
                                     vector_store_id,
                                     file_id):
        """Attach an already-uploaded file to the given vector store."""
        return self.client.beta.vector_stores.files.create(
            vector_store_id=vector_store_id,
            file_id=file_id
        )

    def load_assistant(self, assistant_name="Demorrha"):
        """Retrieve the named assistant, creating it if it does not exist.

        Always (re)sets the system prompt from GlobalSystemPrompts.linguascribe().
        Returns ``self`` (fluent API).
        """
        self.set_system_prompt(GlobalSystemPrompts.linguascribe())
        assistant_id = self.search_assistant(assistant_name)
        if assistant_id is None:
            self.assistant = self.client.beta.assistants.create(
                model="gpt-4o-mini",
                name="Demorrha",
                description="Traite les messages des utilisateurs et génère une traduction.",
                instructions=f"{self.system_prompt}",
                temperature=0.1,
                tools=[{"type": "file_search"}]
            )
        else:
            self.assistant = self.client.beta.assistants.retrieve(assistant_id)
        return self

    def get_assistant(self):
        """Return the loaded assistant, or None if load_assistant() was not called.

        BUG FIX: the original called ``isinstance(self.assistant, None)``, which
        always raises TypeError because ``None`` is not a type.
        """
        return getattr(self, "assistant", None)

    def get_assistant_id(self):
        """Return the loaded assistant's id, or None if no assistant is loaded."""
        assistant = getattr(self, "assistant", None)
        return assistant.id if assistant is not None else None

    def add_file_to_vector_store(self, file_paths):
        """Upload *file_paths* into the current vector store and poll to completion.

        Requires load_vector_store() to have been called first.
        """
        file_streams = [open(path, "rb") for path in file_paths]
        try:
            file_batch = self.client.beta.vector_stores.file_batches.upload_and_poll(
                vector_store_id=self.vector_store.id, files=file_streams
            )
        finally:
            # Close every handle even if the upload fails (original leaked them).
            for stream in file_streams:
                stream.close()
        return file_batch

    def set_payload(self,
                    content_message: str,
                    operation_prompt: Optional[str] = "") -> "DemorrhaAssistant":
        """Compose the user message: the operation prompt followed by the quoted text."""
        self.payload_content = f'{operation_prompt} :\n"""\n{content_message}\n"""'
        return self

    def set_system_prompt(self,
                          system_prompt: Optional[str] = "") -> "DemorrhaAssistant":
        """Store the system prompt used when creating the assistant."""
        self.system_prompt = system_prompt
        return self

    def add_vector_store_to_ressource(self, vector_store_id):
        """Queue a vector store id to be attached via update_vector_store_ids()."""
        self.list_vector_store_ids.append(vector_store_id)
        return self

    def get_vector_store_ids(self):
        """Return the queued vector store ids."""
        return self.list_vector_store_ids

    def empty_vector_store_ids(self):
        """Clear the queued vector store ids. Returns ``self`` (fluent API)."""
        self.list_vector_store_ids = []
        return self

    def update_vector_store_ids(self):
        """Attach the queued vector stores to the assistant's file_search tool."""
        self.assistant = self.client.beta.assistants.update(
            assistant_id=self.assistant.id,
            tool_resources={"file_search": {"vector_store_ids": self.list_vector_store_ids}},
        )

    def use_assistant(self):
        """Run the assistant on the current payload, streaming the response.

        Generator: yields the partial response suffixed with "▌" on each delta
        event, then the final message text on completion.
        """
        full_response = ""
        with self.client.beta.threads.create_and_run(
            assistant_id=self.assistant.id,
            thread={
                "messages": [
                    {"role": "user", "content": self.payload_content}
                ]
            },
            stream=True
        ) as stream:
            for event in stream:
                if event.event == "thread.message.delta":
                    full_response += event.data.delta.content[0].text.value
                    yield full_response + "▌"
                elif event.event == "thread.message.completed":
                    yield event.data.content[0].text.value
                    return
if __name__ == "__main__":
    # Demo: ensure the style vector store exists and is populated, then run
    # a translation request through the assistant and stream the answer.
    bot = DemorrhaAssistant()
    vector_store = bot.load_vector_store().get_vector_store()
    bot.empty_vector_store_ids()
    print(vector_store)

    if vector_store.status == "completed":
        counts = vector_store.file_counts
        if counts.total > 0:
            # All files processed -> register the store for the assistant.
            if counts.completed == counts.total:
                print("Le chargement du vecteur est terminé.")
                bot.add_vector_store_to_ressource(vector_store.id)
        else:
            # Empty store -> seed it with the style reference file.
            file_batch = bot.add_file_to_vector_store(["style.txt"])
            print("Fichier ajouté au vector_store:", file_batch)

    # Example use of the assistant.
    bot.load_assistant("Demorrha")
    bot.update_vector_store_ids()
    bot.set_payload("Tu dois faire preuve de courage pour trouver la force.", "Traduit le texte en Anglais. Et applique les instructions du fichier \'style.txt\'")
    final_response = ""
    for response in bot.use_assistant():
        # Overwrite the same console line as partial responses stream in.
        print(response, end="\r")
        final_response = response
    print(f"\nRéponse finale de l'assistant: {final_response}")