File size: 3,497 Bytes
7ddd05c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3328745
 
7ddd05c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3328745
 
 
 
 
 
 
 
 
 
 
 
7ddd05c
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import asyncio
from src.agents.qdrantretrieveuserproxyagent import TrackableQdrantRetrieveUserProxyAgent
from src.agents.retrieveassistantagent import TrackableRetrieveAssistantAgent
import streamlit as st
from qdrant_client import QdrantClient
import glob
import os
from sentence_transformers import SentenceTransformer

class MultiAgentRAGChat:
    """Pair a retrieval assistant with a Qdrant-backed retrieval user proxy.

    The user proxy retrieves code chunks from the documents at
    ``st.session_state["docs_path"]`` through an in-memory Qdrant instance
    and feeds them to the assistant; the exchange ends when the assistant
    replies with a message ending in "TERMINATE". The conversation is
    driven on a private asyncio event loop so it can be launched from
    synchronous (Streamlit) code via :meth:`run`.
    """

    def __init__(self, assistant_name, user_proxy_name, llm_config, problem):
        """Build the agent pair and a dedicated event loop.

        :param assistant_name: name for the retrieval assistant agent.
        :param user_proxy_name: name for the retrieval user-proxy agent.
        :param llm_config: autogen-style LLM config dict;
            ``llm_config["config_list"][0]["model"]`` is reused as the
            retrieval model name.
        :param problem: the question/task the user proxy sends to the
            assistant.
        """
        self.assistant = TrackableRetrieveAssistantAgent(name=assistant_name,
                                                 system_message="""you are helpful assistant. Reply "TERMINATE" in 
                                                 the end when everything is done """,
                                                 human_input_mode="NEVER",
                                                 llm_config=llm_config,
                                                 )
        self.user_proxy = TrackableQdrantRetrieveUserProxyAgent(name=user_proxy_name,
                                                    human_input_mode="NEVER",
                                                    max_consecutive_auto_reply=4,
                                                    retrieve_config={
                                                        "task": "code",
                                                        # self.list_files(...) can expand a directory into explicit
                                                        # file paths if that is ever needed instead of the raw path.
                                                        "docs_path": st.session_state["docs_path"],
                                                        "chunk_token_size": 500,
                                                        "model": llm_config["config_list"][0]["model"],
                                                        # In-memory Qdrant: the index lives only for this session.
                                                        "client": QdrantClient(":memory:"),
                                                        "embedding_model": self.embeddings_model()
                                                    },
                                                    code_execution_config=False,
                                                    is_termination_msg=lambda x: x.get("content", "").strip().endswith(
                                                      "TERMINATE"))
        self.problem = problem
        # Own a private event loop so run() can drive the async chat from
        # synchronous code without touching any pre-existing loop.
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

    def embeddings_model(self):
        """Return a callable embedding function (SentenceTransformer.encode)."""
        return SentenceTransformer("all-distilroberta-v1").encode

    def list_files(self, directory):
        """Return the entries directly inside *directory*, with '/' separators.

        :param directory: directory path; a trailing slash is appended if
            missing.
        :returns: list of paths with backslashes normalised to forward
            slashes.
        :raises ValueError: if globbing the path fails (the original error
            is chained as ``__cause__``) or the directory contains no files.
        """
        if not directory.endswith('/'):
            directory += '/'
        try:
            # Keep the try body minimal: only the glob can plausibly raise.
            files = glob.glob(os.path.join(directory, '*'))
        except Exception as ex:
            # Chain the original error instead of discarding it.
            raise ValueError('issue with file path') from ex
        # Normalise Windows separators so downstream consumers see '/' only.
        file_list = [path.replace('\\', '/') for path in files]
        # BUG FIX: original compared the bound method `file_list.count` to 0
        # (never true) AND raised inside the try, where the blanket except
        # swallowed it. Check emptiness properly, outside the try.
        if not file_list:
            raise ValueError('list of files is zero')
        return file_list

    async def initiate_chat(self):
        """Start the retrieval-augmented conversation asynchronously."""
        await self.user_proxy.a_initiate_chat(self.assistant,  message=self.user_proxy.message_generator, problem=self.problem, clear_history=st.session_state["chat_with_history"])

    def run(self):
        """Synchronously run the chat to completion on the private loop."""
        self.loop.run_until_complete(self.initiate_chat())