File size: 2,770 Bytes
872d043
3002e1b
 
8ce97f0
 
872d043
3002e1b
 
 
 
 
 
 
872d043
 
3002e1b
 
872d043
 
 
 
 
 
 
3002e1b
872d043
3002e1b
 
 
8ce97f0
3002e1b
 
872d043
 
 
 
 
 
 
 
 
 
 
8ce97f0
872d043
8ce97f0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
872d043
 
 
8ce97f0
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import requests
from langchain_core.messages import SystemMessage , HumanMessage , FunctionMessage
from .state import State
from .schemas import ResponseFormatter , InfluencerNames
from .prompts import chatbot_prompt , get_inf_name_prompt
from .utils import generate_api_knowledge
from src.genai.utils.models_loader import llm_groq

class ChatbotNode:
    """Graph node that routes the latest user query to a backend API endpoint.

    Builds a prompt from the system template, an API-knowledge function
    message, and the (trimmed) conversation history, then asks the LLM for a
    structured ``ResponseFormatter`` answer naming the endpoint, HTTP method,
    and parameters.
    """

    # Trim the conversation once it exceeds this many messages…
    MAX_HISTORY = 11
    # …keeping only this many of the most recent ones.
    KEEP_RECENT = 9

    def __init__(self):
        # Shared Groq-backed chat model loaded at module import time.
        self.llm = llm_groq

    def run(self, state: State):
        """Invoke the LLM router and return its routing decision.

        Args:
            state: Graph state; reads and may trim ``state['messages']``.

        Returns:
            dict with an assistant ``messages`` entry plus the ``endpoint``,
            ``method`` and ``parameters`` the LLM selected.
        """
        print('Message:', state['messages'])
        # BUG FIX: trim history BEFORE assembling the prompt. The original
        # trimmed after `messages` was built, so the truncation never
        # limited what the LLM actually received.
        if len(state['messages']) > self.MAX_HISTORY:
            state["messages"] = state["messages"][-self.KEEP_RECENT:]
        template = chatbot_prompt()
        # API schema injected as a function message so the model knows which
        # endpoints and parameters exist on the backend.
        knowledge_base = generate_api_knowledge('https://reveltrends.vercel.app')
        messages = [SystemMessage(content=template),
                    FunctionMessage(name='analytics_chatbot', content=str(knowledge_base)),
                    ] + state["messages"]
        print('Messages:', state['messages'])
        print(len(state['messages']))
        result = self.llm.with_structured_output(ResponseFormatter).invoke(messages)
        print(result)
        return {
            "messages": [{"role": "assistant", "content": f'''The endpoint is: {result.endpoint}. The parameters are: {result.parameters}'''}],
            "endpoint": result.endpoint,
            "method": result.method,
            "parameters": result.parameters,
        }

class FetchDataNode:
    """Graph node that executes the HTTP request chosen by ``ChatbotNode``.

    GET requests are forwarded directly with the routed parameters; the
    ``/api/v1/compare/`` endpoint gets special handling where an LLM first
    extracts influencer usernames from the raw parameter dict before POSTing.
    """

    def __init__(self):
        self.llm = llm_groq
        self.base_url = 'https://reveltrends.vercel.app'
        # NOTE(review): placeholder bearer token — load a real key from
        # config/env before production use.
        self.headers = {
            "Authorization": "Bearer YOUR_API_KEY",  # replace with your API key if needed
            "Content-Type": "application/json"
            }

    def run(self, state: State):
        """Perform the routed HTTP call and return its JSON body.

        Args:
            state: Graph state; reads ``endpoint``, ``method`` and
                ``parameters``.

        Returns:
            dict with key ``'response'`` holding the decoded JSON payload.

        Raises:
            ValueError: if the method/endpoint combination is not one this
                node knows how to execute.
        """
        print('Entered to fetch data')
        url = f'''{self.base_url}{state['endpoint']}'''
        if state['method'] == 'GET':
            response = requests.get(url, params=state['parameters'], headers=self.headers)
        elif state['endpoint'] == '/api/v1/compare/':
            print('Condition satisfied')
            # Ask the LLM to pull the influencer usernames out of the raw
            # parameter dict (structured output: InfluencerNames).
            messages = [SystemMessage(content=get_inf_name_prompt()),
                        HumanMessage(content=f'''The dictionary of parameters is: {state['parameters']}''')]
            # Use a distinct name here — the original reused `response` for
            # both the LLM result and the HTTP response, which obscured intent.
            extracted = llm_groq.with_structured_output(InfluencerNames).invoke(messages)
            payload = {
                "usernames": extracted.names,
                "freq": state['parameters']['frequency']
            }

            print('The payload is:', payload)

            headers = {
                "Content-Type": "application/json"
            }

            response = requests.post(url, json=payload, headers=headers)
        else:
            # BUG FIX: the original fell through with `response` unbound and
            # crashed with an opaque UnboundLocalError at `response.json()`.
            # Fail loudly with a descriptive error instead.
            raise ValueError(
                f"Unsupported request: method={state['method']!r} "
                f"endpoint={state['endpoint']!r}"
            )

        return {'response': response.json()}