Spaces:
Sleeping
Sleeping
| import requests | |
| from langchain_core.messages import SystemMessage , HumanMessage , FunctionMessage | |
| from .state import State | |
| from .schemas import ResponseFormatter , InfluencerNames | |
| from .prompts import chatbot_prompt , get_inf_name_prompt | |
| from .utils import generate_api_knowledge | |
| from src.genai.utils.models_loader import llm_gpt | |
class ChatbotNode:
    """Graph node that maps the user's query to a backend API endpoint.

    Uses the shared GPT model with structured output (``ResponseFormatter``)
    to choose an endpoint, HTTP method, and parameters; ``FetchDataNode``
    later executes the actual request.
    """

    def __init__(self):
        # Shared LLM instance loaded once by the models loader.
        self.llm = llm_gpt

    def run(self, state: State):
        """Select an API endpoint for the latest user message.

        Args:
            state: Graph state; reads ``messages``.

        Returns:
            Partial state update with the assistant reply plus the chosen
            ``endpoint``, ``method``, and ``parameters``.
        """
        # BUG FIX: trim the conversation history BEFORE building the
        # prompt. The original trimmed after constructing `messages`,
        # so the oversized history was still sent to the model on the
        # current turn and the trim had no effect on this invocation.
        history = state["messages"]
        if len(history) > 11:
            history = history[-9:]

        template = chatbot_prompt()
        # API schema knowledge is injected as a function message so the
        # model knows which endpoints/parameters exist.
        knowledge_base = generate_api_knowledge('https://reveltrends.vercel.app')
        messages = [
            SystemMessage(content=template),
            FunctionMessage(name='analytics_chatbot', content=str(knowledge_base)),
        ] + history

        # method='function_calling' forces tool-call style structured output.
        result = self.llm.with_structured_output(
            ResponseFormatter, method='function_calling'
        ).invoke(messages)

        return {
            "messages": [{
                "role": "assistant",
                "content": f'''The endpoint is: {result.endpoint}. The parameters are: {result.parameters}''',
            }],
            "endpoint": result.endpoint,
            "method": result.method,
            "parameters": result.parameters,
        }
class FetchDataNode:
    """Graph node that executes the HTTP request chosen by ``ChatbotNode``."""

    def __init__(self):
        self.llm = llm_gpt
        self.base_url = 'https://reveltrends.vercel.app'
        # NOTE(review): placeholder credential committed in source —
        # load the real token from config/env instead of a literal.
        self.headers = {
            "Authorization": "Bearer YOUR_API_KEY",  # replace with your API key if needed
            "Content-Type": "application/json",
        }

    def run(self, state: State):
        """Call the selected endpoint and return its JSON payload.

        Args:
            state: Graph state; reads ``endpoint``, ``method``, and
                ``parameters``.

        Returns:
            ``{'response': <decoded JSON body>}``.

        Raises:
            ValueError: if the method/endpoint combination is not one the
                node knows how to execute.
        """
        url = f'''{self.base_url}{state['endpoint']}'''

        if state['method'] == 'GET':
            response = requests.get(
                url, params=state['parameters'], headers=self.headers
            )
        elif state['endpoint'] == '/api/v1/compare/':
            # The compare endpoint needs a POST body with explicit
            # usernames; extract them from the free-form parameters via
            # a structured-output LLM call.
            messages = [
                SystemMessage(content=get_inf_name_prompt()),
                HumanMessage(content=f'''The dictionary of parameters is: {state['parameters']}'''),
            ]
            extraction = self.llm.with_structured_output(InfluencerNames).invoke(messages)
            payload = {
                "usernames": extraction.names,
                "freq": state['parameters']['frequency'],
            }
            response = requests.post(
                url, json=payload, headers={"Content-Type": "application/json"}
            )
        else:
            # BUG FIX: the original had no else branch, so any non-GET
            # request to an endpoint other than /api/v1/compare/ reached
            # the return with `response` unbound (UnboundLocalError).
            # Fail loudly with an actionable message instead.
            raise ValueError(
                f"Unsupported request: method={state['method']!r} "
                f"endpoint={state['endpoint']!r}"
            )

        return {'response': response.json()}